From 16af85004c84d0d6c626b4f8424ce9647669a0c1 Mon Sep 17 00:00:00 2001
From: RaNaN
Date: Sun, 9 Jun 2013 18:10:22 +0200
Subject: moved everything from module to pyload

---
 pyload/AccountManager.py | 141 +
 pyload/AddonManager.py | 248 ++
 pyload/Api.py | 206 ++
 pyload/FileManager.py | 582 ++++
 pyload/InitHomeDir.py | 87 +
 pyload/PluginManager.py | 426 +++
 pyload/Scheduler.py | 141 +
 pyload/Setup.py | 418 +++
 pyload/__init__.py | 0
 pyload/api/AccountApi.py | 54 +
 pyload/api/AddonApi.py | 27 +
 pyload/api/ApiComponent.py | 23 +
 pyload/api/CollectorApi.py | 37 +
 pyload/api/ConfigApi.py | 134 +
 pyload/api/CoreApi.py | 131 +
 pyload/api/DownloadApi.py | 182 ++
 pyload/api/DownloadPreparingApi.py | 121 +
 pyload/api/FileApi.py | 169 +
 pyload/api/UserInteractionApi.py | 61 +
 pyload/api/__init__.py | 8 +
 pyload/cli/AddPackage.py | 66 +
 pyload/cli/Handler.py | 48 +
 pyload/cli/ManageFiles.py | 204 ++
 pyload/cli/__init__.py | 2 +
 pyload/cli/printer.py | 26 +
 pyload/config/ConfigManager.py | 139 +
 pyload/config/ConfigParser.py | 207 ++
 pyload/config/__init__.py | 1 +
 pyload/config/default.py | 107 +
 pyload/database/AccountDatabase.py | 25 +
 pyload/database/ConfigDatabase.py | 56 +
 pyload/database/DatabaseBackend.py | 492 +++
 pyload/database/FileDatabase.py | 448 +++
 pyload/database/StatisticDatabase.py | 13 +
 pyload/database/StorageDatabase.py | 48 +
 pyload/database/UserDatabase.py | 125 +
 pyload/database/__init__.py | 8 +
 pyload/datatypes/PyFile.py | 268 ++
 pyload/datatypes/PyPackage.py | 115 +
 pyload/datatypes/User.py | 63 +
 pyload/datatypes/__init__.py | 0
 pyload/interaction/EventManager.py | 84 +
 pyload/interaction/InteractionManager.py | 166 +
 pyload/interaction/InteractionTask.py | 100 +
 pyload/interaction/__init__.py | 2 +
 pyload/lib/Getch.py | 76 +
 pyload/lib/ReadWriteLock.py | 232 ++
 pyload/lib/SafeEval.py | 47 +
 pyload/lib/__init__.py | 0
 pyload/lib/beaker/__init__.py | 1 +
 pyload/lib/beaker/cache.py | 459 +++
 pyload/lib/beaker/container.py | 633 ++++
 pyload/lib/beaker/converters.py | 26 +
 pyload/lib/beaker/crypto/__init__.py | 40 +
 pyload/lib/beaker/crypto/jcecrypto.py | 30 +
 pyload/lib/beaker/crypto/pbkdf2.py | 342 ++
 pyload/lib/beaker/crypto/pycrypto.py | 31 +
 pyload/lib/beaker/crypto/util.py | 30 +
 pyload/lib/beaker/exceptions.py | 24 +
 pyload/lib/beaker/ext/__init__.py | 0
 pyload/lib/beaker/ext/database.py | 165 +
 pyload/lib/beaker/ext/google.py | 120 +
 pyload/lib/beaker/ext/memcached.py | 82 +
 pyload/lib/beaker/ext/sqla.py | 133 +
 pyload/lib/beaker/middleware.py | 165 +
 pyload/lib/beaker/session.py | 618 ++++
 pyload/lib/beaker/synchronization.py | 381 +++
 pyload/lib/beaker/util.py | 302 ++
 pyload/lib/bottle.py | 3251 ++++++++++++++++++++
 pyload/lib/forwarder.py | 73 +
 pyload/lib/hg_tool.py | 133 +
 pyload/lib/mod_pywebsocket/COPYING | 28 +
 pyload/lib/mod_pywebsocket/__init__.py | 197 ++
 pyload/lib/mod_pywebsocket/_stream_base.py | 165 +
 pyload/lib/mod_pywebsocket/_stream_hixie75.py | 229 ++
 pyload/lib/mod_pywebsocket/_stream_hybi.py | 915 ++++++
 pyload/lib/mod_pywebsocket/common.py | 307 ++
 pyload/lib/mod_pywebsocket/dispatch.py | 387 +++
 pyload/lib/mod_pywebsocket/extensions.py | 727 +++++
 pyload/lib/mod_pywebsocket/handshake/__init__.py | 110 +
 pyload/lib/mod_pywebsocket/handshake/_base.py | 226 ++
 pyload/lib/mod_pywebsocket/handshake/hybi.py | 404 +++
 pyload/lib/mod_pywebsocket/handshake/hybi00.py | 242 ++
 pyload/lib/mod_pywebsocket/headerparserhandler.py | 244 ++
 pyload/lib/mod_pywebsocket/http_header_util.py | 263 ++
 pyload/lib/mod_pywebsocket/memorizingfile.py | 99 +
 pyload/lib/mod_pywebsocket/msgutil.py | 219 ++
 pyload/lib/mod_pywebsocket/mux.py | 1636 ++++++++++
 pyload/lib/mod_pywebsocket/standalone.py | 998 ++++++
 pyload/lib/mod_pywebsocket/stream.py | 57 +
 pyload/lib/mod_pywebsocket/util.py | 515 ++++
 pyload/lib/new_collections.py | 375 +++
 pyload/lib/rename_process.py | 14 +
 pyload/lib/simplejson/__init__.py | 466 +++
 pyload/lib/simplejson/decoder.py | 421 +++
 pyload/lib/simplejson/encoder.py | 534 ++++
 pyload/lib/simplejson/ordered_dict.py | 119 +
 pyload/lib/simplejson/scanner.py | 77 +
 pyload/lib/simplejson/tool.py | 39 +
 pyload/lib/wsgiserver/LICENSE.txt | 25 +
 pyload/lib/wsgiserver/__init__.py | 1794 +++++++++++
 pyload/network/Browser.py | 153 +
 pyload/network/Bucket.py | 63 +
 pyload/network/CookieJar.py | 53 +
 pyload/network/HTTPChunk.py | 298 ++
 pyload/network/HTTPDownload.py | 338 ++
 pyload/network/HTTPRequest.py | 324 ++
 pyload/network/RequestFactory.py | 114 +
 pyload/network/XDCCRequest.py | 162 +
 pyload/network/__init__.py | 1 +
 pyload/plugins/Account.py | 295 ++
 pyload/plugins/Addon.py | 205 ++
 pyload/plugins/Base.py | 343 +++
 pyload/plugins/Crypter.py | 271 ++
 pyload/plugins/Hoster.py | 400 +++
 pyload/plugins/MultiHoster.py | 73 +
 pyload/plugins/ReCaptcha.py | 22 +
 pyload/plugins/__init__.py | 0
 pyload/plugins/accounts/AlldebridCom.py | 49 +
 pyload/plugins/accounts/BayfilesCom.py | 51 +
 pyload/plugins/accounts/BitshareCom.py | 44 +
 pyload/plugins/accounts/BoltsharingCom.py | 12 +
 pyload/plugins/accounts/CramitIn.py | 12 +
 pyload/plugins/accounts/CyberlockerCh.py | 31 +
 pyload/plugins/accounts/CzshareCom.py | 57 +
 pyload/plugins/accounts/DdlstorageCom.py | 12 +
 pyload/plugins/accounts/DebridItaliaCom.py | 50 +
 pyload/plugins/accounts/DepositfilesCom.py | 47 +
 pyload/plugins/accounts/EasybytezCom.py | 73 +
 pyload/plugins/accounts/EgoFilesCom.py | 40 +
 pyload/plugins/accounts/EuroshareEu.py | 55 +
 pyload/plugins/accounts/FastshareCz.py | 54 +
 pyload/plugins/accounts/FilebeerInfo.py | 57 +
 pyload/plugins/accounts/FilecloudIo.py | 49 +
 pyload/plugins/accounts/FilefactoryCom.py | 54 +
 pyload/plugins/accounts/FilejungleCom.py | 60 +
 pyload/plugins/accounts/FilerioCom.py | 12 +
 pyload/plugins/accounts/FilesMailRu.py | 41 +
 pyload/plugins/accounts/FileserveCom.py | 58 +
 pyload/plugins/accounts/FourSharedCom.py | 48 +
 pyload/plugins/accounts/FreakshareCom.py | 51 +
 pyload/plugins/accounts/FshareVn.py | 62 +
 pyload/plugins/accounts/Ftp.py | 13 +
 pyload/plugins/accounts/HellshareCz.py | 87 +
 pyload/plugins/accounts/HellspyCz.py | 70 +
 pyload/plugins/accounts/HotfileCom.py | 86 +
 pyload/plugins/accounts/Http.py | 13 +
 pyload/plugins/accounts/MegasharesCom.py | 42 +
 pyload/plugins/accounts/MultiDebridCom.py | 47 +
 pyload/plugins/accounts/MultishareCz.py | 58 +
 pyload/plugins/accounts/NetloadIn.py | 49 +
 pyload/plugins/accounts/Premium4Me.py | 27 +
 pyload/plugins/accounts/PremiumizeMe.py | 44 +
 pyload/plugins/accounts/QuickshareCz.py | 53 +
 pyload/plugins/accounts/RapidgatorNet.py | 74 +
 pyload/plugins/accounts/RapidshareCom.py | 68 +
 pyload/plugins/accounts/RarefileNet.py | 12 +
 pyload/plugins/accounts/RealdebridCom.py | 35 +
 pyload/plugins/accounts/RehostTo.py | 37 +
 pyload/plugins/accounts/ReloadCc.py | 73 +
 pyload/plugins/accounts/RyushareCom.py | 18 +
 pyload/plugins/accounts/Share76Com.py | 11 +
 pyload/plugins/accounts/ShareFilesCo.py | 12 +
 pyload/plugins/accounts/ShareRapidCom.py | 47 +
 pyload/plugins/accounts/ShareonlineBiz.py | 56 +
 pyload/plugins/accounts/SpeedLoadOrg.py | 12 +
 pyload/plugins/accounts/StahnuTo.py | 49 +
 pyload/plugins/accounts/TurbobitNet.py | 56 +
 pyload/plugins/accounts/UlozTo.py | 36 +
 pyload/plugins/accounts/UploadedTo.py | 65 +
 pyload/plugins/accounts/UploadheroCom.py | 35 +
 pyload/plugins/accounts/UploadingCom.py | 52 +
 pyload/plugins/accounts/UploadstationCom.py | 13 +
 pyload/plugins/accounts/UptoboxCom.py | 12 +
 pyload/plugins/accounts/WarserverCz.py | 70 +
 pyload/plugins/accounts/WuploadCom.py | 47 +
 pyload/plugins/accounts/X7To.py | 66 +
 pyload/plugins/accounts/YibaishiwuCom.py | 51 +
 pyload/plugins/accounts/ZeveraCom.py | 49 +
 pyload/plugins/accounts/__init__.py | 0
 pyload/plugins/addons/AlldebridCom.py | 28 +
 pyload/plugins/addons/BypassCaptcha.py | 143 +
 pyload/plugins/addons/CaptchaBrotherhood.py | 169 +
 pyload/plugins/addons/CaptchaTrader.py | 159 +
 pyload/plugins/addons/Checksum.py | 169 +
 pyload/plugins/addons/ClickAndLoad.py | 89 +
 pyload/plugins/addons/DeathByCaptcha.py | 210 ++
 pyload/plugins/addons/DebridItaliaCom.py | 41 +
 pyload/plugins/addons/DeleteFinished.py | 86 +
 pyload/plugins/addons/DownloadScheduler.py | 86 +
 pyload/plugins/addons/EasybytezCom.py | 31 +
 pyload/plugins/addons/Ev0InFetcher.py | 87 +
 pyload/plugins/addons/ExpertDecoders.py | 112 +
 pyload/plugins/addons/ExternalScripts.py | 118 +
 pyload/plugins/addons/ExtractArchive.py | 312 ++
 pyload/plugins/addons/HotFolder.py | 85 +
 pyload/plugins/addons/IRCInterface.py | 431 +++
 pyload/plugins/addons/ImageTyperz.py | 160 +
 pyload/plugins/addons/LinkdecrypterCom.py | 59 +
 pyload/plugins/addons/MergeFiles.py | 94 +
 pyload/plugins/addons/MultiDebridCom.py | 42 +
 pyload/plugins/addons/MultiHome.py | 82 +
 pyload/plugins/addons/MultiHoster.py | 102 +
 pyload/plugins/addons/MultishareCz.py | 23 +
 pyload/plugins/addons/Premium4Me.py | 33 +
 pyload/plugins/addons/PremiumizeMe.py | 50 +
 pyload/plugins/addons/RealdebridCom.py | 25 +
 pyload/plugins/addons/RehostTo.py | 41 +
 pyload/plugins/addons/RestartFailed.py | 124 +
 pyload/plugins/addons/UnSkipOnFail.py | 97 +
 pyload/plugins/addons/UpdateManager.py | 201 +
 pyload/plugins/addons/XFileSharingPro.py | 70 +
 pyload/plugins/addons/XMPPInterface.py | 276 ++
 pyload/plugins/addons/ZeveraCom.py | 19 +
 pyload/plugins/addons/__init__.py | 0
 pyload/plugins/crypter/C1neonCom.py | 133 +
 pyload/plugins/crypter/CCF.py | 42 +
 pyload/plugins/crypter/CrockoComFolder.py | 14 +
 pyload/plugins/crypter/CryptItCom.py | 38 +
 pyload/plugins/crypter/CzshareComFolder.py | 30 +
 pyload/plugins/crypter/DDLMusicOrg.py | 42 +
 pyload/plugins/crypter/DataHuFolder.py | 55 +
 pyload/plugins/crypter/DdlstorageComFolder.py | 32 +
 pyload/plugins/crypter/DepositfilesComFolder.py | 14 +
 pyload/plugins/crypter/Dereferer.py | 34 +
 pyload/plugins/crypter/DontKnowMe.py | 21 +
 pyload/plugins/crypter/DownloadVimeoCom.py | 30 +
 pyload/plugins/crypter/DuckCryptInfo.py | 58 +
 pyload/plugins/crypter/EasybytezComFolder.py | 60 +
 pyload/plugins/crypter/EmbeduploadCom.py | 54 +
 pyload/plugins/crypter/FilebeerInfoFolder.py | 35 +
 pyload/plugins/crypter/FilefactoryComFolder.py | 44 +
 pyload/plugins/crypter/FileserveComFolder.py | 32 +
 pyload/plugins/crypter/FourChanOrg.py | 25 +
 pyload/plugins/crypter/FshareVnFolder.py | 14 +
 pyload/plugins/crypter/GooGl.py | 41 +
 pyload/plugins/crypter/HoerbuchIn.py | 55 +
 pyload/plugins/crypter/HotfileFolderCom.py | 29 +
 pyload/plugins/crypter/ILoadTo.py | 62 +
 pyload/plugins/crypter/LetitbitNetFolder.py | 33 +
 pyload/plugins/crypter/LinkList.py | 55 +
 pyload/plugins/crypter/LinkSaveIn.py | 227 ++
 pyload/plugins/crypter/LinkdecrypterCom.py | 100 +
 pyload/plugins/crypter/LixIn.py | 60 +
 pyload/plugins/crypter/LofCc.py | 49 +
 pyload/plugins/crypter/MBLinkInfo.py | 27 +
 pyload/plugins/crypter/MediafireComFolder.py | 55 +
 pyload/plugins/crypter/Movie2kTo.py | 151 +
 pyload/plugins/crypter/MultiloadCz.py | 41 +
 pyload/plugins/crypter/MultiuploadCom.py | 58 +
 pyload/plugins/crypter/NCryptIn.py | 251 ++
 pyload/plugins/crypter/NetfolderIn.py | 71 +
 pyload/plugins/crypter/OneKhDe.py | 36 +
 pyload/plugins/crypter/OronComFolder.py | 46 +
 pyload/plugins/crypter/QuickshareCzFolder.py | 30 +
 pyload/plugins/crypter/RSDF.py | 49 +
 pyload/plugins/crypter/RSLayerCom.py | 49 +
 pyload/plugins/crypter/RelinkUs.py | 264 ++
 pyload/plugins/crypter/SecuredIn.py | 334 ++
 pyload/plugins/crypter/SerienjunkiesOrg.py | 321 ++
 pyload/plugins/crypter/ShareLinksBiz.py | 269 ++
 pyload/plugins/crypter/ShareRapidComFolder.py | 14 +
 pyload/plugins/crypter/SpeedLoadOrgFolder.py | 30 +
 pyload/plugins/crypter/StealthTo.py | 45 +
 pyload/plugins/crypter/TrailerzoneInfo.py | 45 +
 pyload/plugins/crypter/UlozToFolder.py | 40 +
 pyload/plugins/crypter/UploadedToFolder.py | 50 +
 pyload/plugins/crypter/WiiReloadedOrg.py | 52 +
 pyload/plugins/crypter/XfilesharingProFolder.py | 34 +
 pyload/plugins/crypter/YoutubeBatch.py | 42 +
 pyload/plugins/crypter/__init__.py | 0
 pyload/plugins/hooks/Captcha9kw.py | 162 +
 pyload/plugins/hooks/ReloadCc.py | 65 +
 pyload/plugins/hoster/ARD.py | 80 +
 pyload/plugins/hoster/AlldebridCom.py | 84 +
 pyload/plugins/hoster/BasePlugin.py | 104 +
 pyload/plugins/hoster/BayfilesCom.py | 93 +
 pyload/plugins/hoster/BezvadataCz.py | 94 +
 pyload/plugins/hoster/BillionuploadsCom.py | 17 +
 pyload/plugins/hoster/BitshareCom.py | 179 ++
 pyload/plugins/hoster/BoltsharingCom.py | 15 +
 pyload/plugins/hoster/CatShareNet.py | 38 +
 pyload/plugins/hoster/ChipDe.py | 24 +
 pyload/plugins/hoster/CloudzerNet.py | 65 +
 pyload/plugins/hoster/CramitIn.py | 20 +
 pyload/plugins/hoster/CrockoCom.py | 71 +
 pyload/plugins/hoster/CyberlockerCh.py | 15 +
 pyload/plugins/hoster/CzshareCom.py | 160 +
 pyload/plugins/hoster/DailymotionCom.py | 47 +
 pyload/plugins/hoster/DataHu.py | 53 +
 pyload/plugins/hoster/DataportCz.py | 68 +
 pyload/plugins/hoster/DateiTo.py | 94 +
 pyload/plugins/hoster/DdlstorageCom.py | 20 +
 pyload/plugins/hoster/DebridItaliaCom.py | 61 +
 pyload/plugins/hoster/DepositfilesCom.py | 112 +
 pyload/plugins/hoster/DlFreeFr.py | 183 ++
 pyload/plugins/hoster/EasybytezCom.py | 47 +
 pyload/plugins/hoster/EdiskCz.py | 62 +
 pyload/plugins/hoster/EgoFilesCom.py | 103 +
 pyload/plugins/hoster/EuroshareEu.py | 74 +
 pyload/plugins/hoster/ExtabitCom.py | 87 +
 pyload/plugins/hoster/FastshareCz.py | 96 +
 pyload/plugins/hoster/FileApeCom.py | 62 +
 pyload/plugins/hoster/FilebeerInfo.py | 15 +
 pyload/plugins/hoster/FilecloudIo.py | 112 +
 pyload/plugins/hoster/FilefactoryCom.py | 133 +
 pyload/plugins/hoster/FilejungleCom.py | 38 +
 pyload/plugins/hoster/FilepostCom.py | 135 +
 pyload/plugins/hoster/FilerNet.py | 103 +
 pyload/plugins/hoster/FilerioCom.py | 20 +
 pyload/plugins/hoster/FilesMailRu.py | 99 +
 pyload/plugins/hoster/FileserveCom.py | 211 ++
 pyload/plugins/hoster/FileshareInUa.py | 78 +
 pyload/plugins/hoster/FlyFilesNet.py | 41 +
 pyload/plugins/hoster/FourSharedCom.py | 53 +
 pyload/plugins/hoster/FreakshareCom.py | 167 +
 pyload/plugins/hoster/FreevideoCz.py | 64 +
 pyload/plugins/hoster/FshareVn.py | 111 +
 pyload/plugins/hoster/Ftp.py | 91 +
 pyload/plugins/hoster/GamefrontCom.py | 80 +
 pyload/plugins/hoster/GigapetaCom.py | 73 +
 pyload/plugins/hoster/HellshareCz.py | 56 +
 pyload/plugins/hoster/HellspyCz.py | 70 +
 pyload/plugins/hoster/HotfileCom.py | 137 +
 pyload/plugins/hoster/IFileWs.py | 20 +
 pyload/plugins/hoster/IcyFilesCom.py | 112 +
 pyload/plugins/hoster/IfileIt.py | 74 +
 pyload/plugins/hoster/IfolderRu.py | 90 +
 pyload/plugins/hoster/JumbofilesCom.py | 31 +
 pyload/plugins/hoster/LetitbitNet.py | 125 +
 pyload/plugins/hoster/LoadTo.py | 83 +
 pyload/plugins/hoster/LuckyShareNet.py | 72 +
 pyload/plugins/hoster/MediafireCom.py | 135 +
 pyload/plugins/hoster/MegaNz.py | 125 +
 pyload/plugins/hoster/MegasharesCom.py | 108 +
 pyload/plugins/hoster/MovReelCom.py | 106 +
 pyload/plugins/hoster/MultiDebridCom.py | 57 +
 pyload/plugins/hoster/MultishareCz.py | 76 +
 pyload/plugins/hoster/MyvideoDe.py | 43 +
 pyload/plugins/hoster/NarodRu.py | 66 +
 pyload/plugins/hoster/NetloadIn.py | 252 ++
 pyload/plugins/hoster/NovafileCom.py | 24 +
 pyload/plugins/hoster/NowDownloadEu.py | 66 +
 pyload/plugins/hoster/OneFichierCom.py | 58 +
 pyload/plugins/hoster/PornhostCom.py | 76 +
 pyload/plugins/hoster/PornhubCom.py | 83 +
 pyload/plugins/hoster/Premium4Me.py | 70 +
 pyload/plugins/hoster/PremiumizeMe.py | 50 +
 pyload/plugins/hoster/PutlockerCom.py | 78 +
 pyload/plugins/hoster/QuickshareCz.py | 99 +
 pyload/plugins/hoster/RapidgatorNet.py | 192 ++
 pyload/plugins/hoster/RapidshareCom.py | 225 ++
 pyload/plugins/hoster/RarefileNet.py | 34 +
 pyload/plugins/hoster/RealdebridCom.py | 88 +
 pyload/plugins/hoster/RedtubeCom.py | 56 +
 pyload/plugins/hoster/RehostTo.py | 37 +
 pyload/plugins/hoster/ReloadCc.py | 103 +
 pyload/plugins/hoster/RyushareCom.py | 55 +
 pyload/plugins/hoster/SecureUploadEu.py | 18 +
 pyload/plugins/hoster/SendmywayCom.py | 18 +
 pyload/plugins/hoster/SendspaceCom.py | 67 +
 pyload/plugins/hoster/Share4webCom.py | 16 +
 pyload/plugins/hoster/Share76Com.py | 19 +
 pyload/plugins/hoster/ShareFilesCo.py | 24 +
 pyload/plugins/hoster/ShareRapidCom.py | 104 +
 pyload/plugins/hoster/SharebeesCom.py | 19 +
 pyload/plugins/hoster/ShareonlineBiz.py | 203 ++
 pyload/plugins/hoster/ShareplaceCom.py | 84 +
 pyload/plugins/hoster/ShragleCom.py | 106 +
 pyload/plugins/hoster/SpeedLoadOrg.py | 21 +
 pyload/plugins/hoster/SpeedfileCz.py | 65 +
 pyload/plugins/hoster/StreamCz.py | 76 +
 pyload/plugins/hoster/TurbobitNet.py | 170 +
 pyload/plugins/hoster/TurbouploadCom.py | 45 +
 pyload/plugins/hoster/TusfilesNet.py | 18 +
 pyload/plugins/hoster/TwoSharedCom.py | 33 +
 pyload/plugins/hoster/UlozTo.py | 156 +
 pyload/plugins/hoster/UloziskoSk.py | 75 +
 pyload/plugins/hoster/UnibytesCom.py | 80 +
 pyload/plugins/hoster/UploadStationCom.py | 21 +
 pyload/plugins/hoster/UploadedTo.py | 238 ++
 pyload/plugins/hoster/UploadheroCom.py | 84 +
 pyload/plugins/hoster/UploadingCom.py | 110 +
 pyload/plugins/hoster/UptoboxCom.py | 21 +
 pyload/plugins/hoster/VeehdCom.py | 80 +
 pyload/plugins/hoster/WarserverCz.py | 70 +
 pyload/plugins/hoster/WebshareCz.py | 48 +
 pyload/plugins/hoster/WrzucTo.py | 58 +
 pyload/plugins/hoster/WuploadCom.py | 241 ++
 pyload/plugins/hoster/X7To.py | 93 +
 pyload/plugins/hoster/XFileSharingPro.py | 314 ++
 pyload/plugins/hoster/XHamsterCom.py | 120 +
 pyload/plugins/hoster/XVideosCom.py | 19 +
 pyload/plugins/hoster/Xdcc.py | 229 ++
 pyload/plugins/hoster/XvidstageCom.py | 114 +
 pyload/plugins/hoster/YibaishiwuCom.py | 54 +
 pyload/plugins/hoster/YoupornCom.py | 56 +
 pyload/plugins/hoster/YourfilesTo.py | 83 +
 pyload/plugins/hoster/YoutubeCom.py | 164 +
 pyload/plugins/hoster/ZDF.py | 46 +
 pyload/plugins/hoster/ZeveraCom.py | 108 +
 pyload/plugins/hoster/ZippyshareCom.py | 187 ++
 pyload/plugins/hoster/__init__.py | 0
 pyload/plugins/internal/AbstractExtractor.py | 93 +
 pyload/plugins/internal/CaptchaService.py | 77 +
 pyload/plugins/internal/DeadHoster.py | 18 +
 pyload/plugins/internal/NetloadInOCR.py | 27 +
 pyload/plugins/internal/OCR.py | 314 ++
 pyload/plugins/internal/ShareonlineBizOCR.py | 53 +
 pyload/plugins/internal/SimpleCrypter.py | 68 +
 pyload/plugins/internal/SimpleHoster.py | 251 ++
 pyload/plugins/internal/UnRar.py | 212 ++
 pyload/plugins/internal/UnZip.py | 49 +
 pyload/plugins/internal/XFSPAccount.py | 79 +
 pyload/plugins/internal/__init__.py | 0
 pyload/remote/ClickAndLoadBackend.py | 170 +
 pyload/remote/JSONClient.py | 56 +
 pyload/remote/RemoteManager.py | 89 +
 pyload/remote/WSClient.py | 59 +
 pyload/remote/WebSocketBackend.py | 49 +
 pyload/remote/__init__.py | 0
 pyload/remote/apitypes.py | 536 ++++
 pyload/remote/apitypes_debug.py | 135 +
 pyload/remote/create_apitypes.py | 180 ++
 pyload/remote/create_jstypes.py | 36 +
 pyload/remote/json_converter.py | 64 +
 pyload/remote/pyload.thrift | 538 ++++
 pyload/remote/ttypes.py | 534 ++++
 pyload/remote/wsbackend/AbstractHandler.py | 133 +
 pyload/remote/wsbackend/ApiHandler.py | 81 +
 pyload/remote/wsbackend/AsyncHandler.py | 167 +
 pyload/remote/wsbackend/Dispatcher.py | 31 +
 pyload/remote/wsbackend/Server.py | 733 +++++
 pyload/remote/wsbackend/__init__.py | 0
 pyload/setup/System_Checks.py | 126 +
 pyload/threads/AddonThread.py | 65 +
 pyload/threads/BaseThread.py | 142 +
 pyload/threads/DecrypterThread.py | 81 +
 pyload/threads/DownloadThread.py | 231 ++
 pyload/threads/InfoThread.py | 168 +
 pyload/threads/ThreadManager.py | 313 ++
 pyload/threads/__init__.py | 0
 pyload/utils/ImportDebugger.py | 19 +
 pyload/utils/JsEngine.py | 195 ++
 pyload/utils/__init__.py | 253 ++
 pyload/utils/fs.py | 78 +
 pyload/utils/json_layer.py | 15 +
 pyload/utils/packagetools.py | 155 +
 pyload/utils/pylgettext.py | 61 +
 pyload/web/.bowerrc | 3 +
 pyload/web/.jshintrc | 29 +
 pyload/web/Gruntfile.js | 361 +++
 pyload/web/ServerThread.py | 143 +
 pyload/web/__init__.py | 0
 pyload/web/api_app.py | 112 +
 pyload/web/app/favicon.ico | Bin 0 -> 6006 bytes
 pyload/web/app/fonts/Sansation_Bold-webfont.eot | Bin 0 -> 35336 bytes
 pyload/web/app/fonts/Sansation_Bold-webfont.ttf | Bin 0 -> 35160 bytes
 pyload/web/app/fonts/Sansation_Bold-webfont.woff | Bin 0 -> 18496 bytes
 pyload/web/app/fonts/Sansation_Light-webfont.eot | Bin 0 -> 36700 bytes
 pyload/web/app/fonts/Sansation_Light-webfont.ttf | Bin 0 -> 36520 bytes
 pyload/web/app/fonts/Sansation_Light-webfont.woff | Bin 0 -> 18408 bytes
 pyload/web/app/fonts/Sansation_Regular-webfont.eot | Bin 0 -> 36368 bytes
 pyload/web/app/fonts/Sansation_Regular-webfont.ttf | Bin 0 -> 36180 bytes
 .../web/app/fonts/Sansation_Regular-webfont.woff | Bin 0 -> 18316 bytes
 pyload/web/app/images/default/bgpattern.png | Bin 0 -> 2487 bytes
 pyload/web/app/images/default/checks_sheet.png | Bin 0 -> 1145 bytes
 pyload/web/app/images/default/fancy_deboss.png | Bin 0 -> 265 bytes
 pyload/web/app/images/default/logo.png | Bin 0 -> 5329 bytes
 pyload/web/app/images/default/logo_grey.png | Bin 0 -> 1141 bytes
 pyload/web/app/images/icon.png | Bin 0 -> 1912 bytes
 pyload/web/app/index.html | 107 +
 pyload/web/app/scripts/app.js | 105 +
 pyload/web/app/scripts/collections/AccountList.js | 24 +
 pyload/web/app/scripts/collections/FileList.js | 18 +
 .../web/app/scripts/collections/InteractionList.js | 49 +
 pyload/web/app/scripts/collections/PackageList.js | 16 +
 pyload/web/app/scripts/collections/ProgressList.js | 18 +
 pyload/web/app/scripts/config.js | 73 +
 pyload/web/app/scripts/controller.js | 67 +
 pyload/web/app/scripts/default.js | 30 +
 pyload/web/app/scripts/helpers/fileHelper.js | 55 +
 pyload/web/app/scripts/helpers/formatSize.js | 15 +
 pyload/web/app/scripts/helpers/formatTime.js | 17 +
 pyload/web/app/scripts/helpers/pluginIcon.js | 14 +
 pyload/web/app/scripts/models/Account.js | 51 +
 pyload/web/app/scripts/models/ConfigHolder.js | 68 +
 pyload/web/app/scripts/models/ConfigItem.js | 40 +
 pyload/web/app/scripts/models/File.js | 92 +
 pyload/web/app/scripts/models/InteractionTask.js | 41 +
 pyload/web/app/scripts/models/Package.js | 119 +
 pyload/web/app/scripts/models/Progress.js | 50 +
 pyload/web/app/scripts/models/ServerStatus.js | 47 +
 pyload/web/app/scripts/models/TreeCollection.js | 50 +
 pyload/web/app/scripts/models/UserSession.js | 20 +
 pyload/web/app/scripts/router.js | 29 +
 pyload/web/app/scripts/routers/defaultRouter.js | 30 +
 pyload/web/app/scripts/routers/mobileRouter.js | 56 +
 pyload/web/app/scripts/utils/animations.js | 129 +
 pyload/web/app/scripts/utils/apitypes.js | 16 +
 pyload/web/app/scripts/utils/dialogs.js | 16 +
 pyload/web/app/scripts/utils/initHB.js | 11 +
 pyload/web/app/scripts/utils/lazyRequire.js | 97 +
 pyload/web/app/scripts/vendor/Handlebars-1.0rc1.js | 1927 ++++++++++++
 pyload/web/app/scripts/vendor/bootstrap-2.3.2.js | 2291 ++++++++++++++
 pyload/web/app/scripts/vendor/jquery.omniwindow.js | 141 +
 pyload/web/app/scripts/vendor/remaining.js | 149 +
 pyload/web/app/scripts/views/abstract/itemView.js | 47 +
 pyload/web/app/scripts/views/abstract/modalView.js | 125 +
 .../app/scripts/views/accounts/accountListView.js | 52 +
 .../web/app/scripts/views/accounts/accountModal.js | 72 +
 .../web/app/scripts/views/accounts/accountView.js | 18 +
 .../app/scripts/views/dashboard/dashboardView.js | 168 +
 pyload/web/app/scripts/views/dashboard/fileView.js | 102 +
 .../web/app/scripts/views/dashboard/filterView.js | 133 +
 .../web/app/scripts/views/dashboard/packageView.js | 75 +
 .../app/scripts/views/dashboard/selectionView.js | 155 +
 pyload/web/app/scripts/views/headerView.js | 240 ++
 pyload/web/app/scripts/views/input/inputLoader.js | 8 +
 pyload/web/app/scripts/views/input/inputView.js | 86 +
 pyload/web/app/scripts/views/input/textInput.js | 36 +
 pyload/web/app/scripts/views/linkGrabberModal.js | 49 +
 pyload/web/app/scripts/views/loginView.js | 37 +
 pyload/web/app/scripts/views/notificationView.js | 83 +
 pyload/web/app/scripts/views/progressView.js | 33 +
 pyload/web/app/scripts/views/queryModal.js | 69 +
 .../scripts/views/settings/configSectionView.js | 99 +
 .../scripts/views/settings/pluginChooserModal.js | 69 +
 .../web/app/scripts/views/settings/settingsView.js | 184 ++
 pyload/web/app/styles/default/accounts.less | 6 +
 pyload/web/app/styles/default/admin.less | 17 +
 pyload/web/app/styles/default/base.less | 163 +
 pyload/web/app/styles/default/common.less | 90 +
 pyload/web/app/styles/default/dashboard.less | 331 ++
 pyload/web/app/styles/default/main.less | 11 +
 pyload/web/app/styles/default/settings.less | 121 +
 pyload/web/app/styles/default/style.less | 366 +++
 pyload/web/app/styles/font.css | 37 +
 .../app/templates/default/accounts/account.html | 10 +
 .../app/templates/default/accounts/actionbar.html | 5 +
 .../web/app/templates/default/accounts/layout.html | 19 +
 pyload/web/app/templates/default/admin.html | 223 ++
 .../app/templates/default/dashboard/actionbar.html | 54 +
 .../web/app/templates/default/dashboard/file.html | 34 +
 .../app/templates/default/dashboard/layout.html | 35 +
 .../app/templates/default/dashboard/package.html | 50 +
 .../app/templates/default/dashboard/select.html | 11 +
 .../app/templates/default/dialogs/addAccount.html | 42 +
 .../templates/default/dialogs/addPluginConfig.html | 26 +
 .../templates/default/dialogs/confirmDelete.html | 11 +
 .../templates/default/dialogs/interactionTask.html | 37 +
 .../app/templates/default/dialogs/linkgrabber.html | 49 +
 .../web/app/templates/default/dialogs/modal.html | 10 +
 .../web/app/templates/default/header/layout.html | 62 +
 .../web/app/templates/default/header/progress.html | 14 +
 .../app/templates/default/header/progressbar.html | 27 +
 .../web/app/templates/default/header/status.html | 3 +
 pyload/web/app/templates/default/login.html | 28 +
 pyload/web/app/templates/default/notification.html | 11 +
 .../app/templates/default/settings/actionbar.html | 5 +
 .../web/app/templates/default/settings/config.html | 17 +
 .../app/templates/default/settings/configItem.html | 7 +
 .../web/app/templates/default/settings/layout.html | 11 +
 .../web/app/templates/default/settings/menu.html | 40 +
 pyload/web/app/templates/default/setup.html | 16 +
 pyload/web/app/unavailable.html | 18 +
 pyload/web/bower.json | 23 +
 pyload/web/cnl_app.py | 166 +
 pyload/web/middlewares.py | 134 +
 pyload/web/package.json | 32 +
 pyload/web/pyload_app.py | 78 +
 pyload/web/servers.py | 162 +
 pyload/web/setup_app.py | 21 +
 pyload/web/utils.py | 78 +
 pyload/web/webinterface.py | 98 +
 575 files changed, 68345 insertions(+)
 create mode 100644 pyload/AccountManager.py
 create mode 100644 pyload/AddonManager.py
 create mode 100644 pyload/Api.py
 create mode 100644 pyload/FileManager.py
 create mode 100644 pyload/InitHomeDir.py
 create mode 100644 pyload/PluginManager.py
 create mode 100644 pyload/Scheduler.py
 create mode 100644 pyload/Setup.py
 create mode 100644 pyload/__init__.py
 create mode 100644 pyload/api/AccountApi.py
 create mode 100644 pyload/api/AddonApi.py
 create mode 100644 pyload/api/ApiComponent.py
 create mode 100644 pyload/api/CollectorApi.py
 create mode 100644 pyload/api/ConfigApi.py
 create mode 100644 pyload/api/CoreApi.py
 create mode 100644 pyload/api/DownloadApi.py
 create mode 100644 pyload/api/DownloadPreparingApi.py
 create mode 100644 pyload/api/FileApi.py
 create mode 100644 pyload/api/UserInteractionApi.py
 create mode 100644 pyload/api/__init__.py
 create mode 100644 pyload/cli/AddPackage.py
 create mode 100644 pyload/cli/Handler.py
 create mode 100644 pyload/cli/ManageFiles.py
 create mode 100644 pyload/cli/__init__.py
 create mode 100644 pyload/cli/printer.py
 create mode 100644 pyload/config/ConfigManager.py
 create mode 100644 pyload/config/ConfigParser.py
 create mode 100644 pyload/config/__init__.py
 create mode 100644 pyload/config/default.py
 create mode 100644 pyload/database/AccountDatabase.py
 create mode 100644 pyload/database/ConfigDatabase.py
 create mode 100644 pyload/database/DatabaseBackend.py
 create mode 100644 pyload/database/FileDatabase.py
 create mode 100644 pyload/database/StatisticDatabase.py
 create mode 100644 pyload/database/StorageDatabase.py
 create mode 100644 pyload/database/UserDatabase.py
 create mode 100644 pyload/database/__init__.py
 create mode 100644 pyload/datatypes/PyFile.py
 create mode 100644 pyload/datatypes/PyPackage.py
 create mode 100644 pyload/datatypes/User.py
 create mode 100644 pyload/datatypes/__init__.py
 create mode 100644 pyload/interaction/EventManager.py
 create mode 100644 pyload/interaction/InteractionManager.py
 create mode 100644 pyload/interaction/InteractionTask.py
 create mode 100644 pyload/interaction/__init__.py
 create mode 100644 pyload/lib/Getch.py
 create mode 100644 pyload/lib/ReadWriteLock.py
 create mode 100644 pyload/lib/SafeEval.py
 create mode 100644 pyload/lib/__init__.py
 create mode 100644 pyload/lib/beaker/__init__.py
 create mode 100644 pyload/lib/beaker/cache.py
 create mode 100644 pyload/lib/beaker/container.py
 create mode 100644 pyload/lib/beaker/converters.py
 create mode 100644 pyload/lib/beaker/crypto/__init__.py
 create mode 100644 pyload/lib/beaker/crypto/jcecrypto.py
 create mode 100644 pyload/lib/beaker/crypto/pbkdf2.py
 create mode 100644 pyload/lib/beaker/crypto/pycrypto.py
 create mode 100644 pyload/lib/beaker/crypto/util.py
 create mode 100644 pyload/lib/beaker/exceptions.py
 create mode 100644 pyload/lib/beaker/ext/__init__.py
 create mode 100644 pyload/lib/beaker/ext/database.py
 create mode 100644 pyload/lib/beaker/ext/google.py
 create mode 100644 pyload/lib/beaker/ext/memcached.py
 create mode 100644 pyload/lib/beaker/ext/sqla.py
 create mode 100644 pyload/lib/beaker/middleware.py
 create mode 100644 pyload/lib/beaker/session.py
 create mode 100644 pyload/lib/beaker/synchronization.py
 create mode 100644 pyload/lib/beaker/util.py
 create mode 100644 pyload/lib/bottle.py
 create mode 100644 pyload/lib/forwarder.py
 create mode 100644 pyload/lib/hg_tool.py
 create mode 100644 pyload/lib/mod_pywebsocket/COPYING
 create mode 100644 pyload/lib/mod_pywebsocket/__init__.py
 create mode 100644 pyload/lib/mod_pywebsocket/_stream_base.py
 create mode 100644 pyload/lib/mod_pywebsocket/_stream_hixie75.py
 create mode 100644 pyload/lib/mod_pywebsocket/_stream_hybi.py
 create mode 100644 pyload/lib/mod_pywebsocket/common.py
 create mode 100644 pyload/lib/mod_pywebsocket/dispatch.py
 create mode 100644 pyload/lib/mod_pywebsocket/extensions.py
 create mode 100644 pyload/lib/mod_pywebsocket/handshake/__init__.py
 create mode 100644 pyload/lib/mod_pywebsocket/handshake/_base.py
 create mode 100644 pyload/lib/mod_pywebsocket/handshake/hybi.py
 create mode 100644 pyload/lib/mod_pywebsocket/handshake/hybi00.py
 create mode 100644 pyload/lib/mod_pywebsocket/headerparserhandler.py
 create mode 100644 pyload/lib/mod_pywebsocket/http_header_util.py
 create mode 100644 pyload/lib/mod_pywebsocket/memorizingfile.py
 create mode 100644 pyload/lib/mod_pywebsocket/msgutil.py
 create mode 100644 pyload/lib/mod_pywebsocket/mux.py
 create mode 100755 pyload/lib/mod_pywebsocket/standalone.py
 create mode 100644 pyload/lib/mod_pywebsocket/stream.py
 create mode 100644 pyload/lib/mod_pywebsocket/util.py
 create mode 100644 pyload/lib/new_collections.py
 create mode 100644 pyload/lib/rename_process.py
 create mode 100644 pyload/lib/simplejson/__init__.py
 create mode 100644 pyload/lib/simplejson/decoder.py
 create mode 100644 pyload/lib/simplejson/encoder.py
 create mode 100644 pyload/lib/simplejson/ordered_dict.py
 create mode 100644 pyload/lib/simplejson/scanner.py
 create mode 100644 pyload/lib/simplejson/tool.py
 create mode 100644 pyload/lib/wsgiserver/LICENSE.txt
 create mode 100644 pyload/lib/wsgiserver/__init__.py
 create mode 100644 pyload/network/Browser.py
 create mode 100644 pyload/network/Bucket.py
 create mode 100644 pyload/network/CookieJar.py
 create mode 100644 pyload/network/HTTPChunk.py
 create mode 100644 pyload/network/HTTPDownload.py
 create mode 100644 pyload/network/HTTPRequest.py
 create mode 100644 pyload/network/RequestFactory.py
 create mode 100644 pyload/network/XDCCRequest.py
 create mode 100644 pyload/network/__init__.py
 create mode 100644 pyload/plugins/Account.py
 create mode 100644 pyload/plugins/Addon.py
 create mode 100644 pyload/plugins/Base.py
 create mode 100644 pyload/plugins/Crypter.py
 create mode 100644 pyload/plugins/Hoster.py
 create mode 100644 pyload/plugins/MultiHoster.py
 create mode 100644 pyload/plugins/ReCaptcha.py
 create mode 100644 pyload/plugins/__init__.py
 create mode 100644 pyload/plugins/accounts/AlldebridCom.py
 create mode 100644 pyload/plugins/accounts/BayfilesCom.py
 create mode 100644 pyload/plugins/accounts/BitshareCom.py
 create mode 100644 pyload/plugins/accounts/BoltsharingCom.py
 create mode 100644 pyload/plugins/accounts/CramitIn.py
 create mode 100644 pyload/plugins/accounts/CyberlockerCh.py
 create mode 100644 pyload/plugins/accounts/CzshareCom.py
 create mode 100644 pyload/plugins/accounts/DdlstorageCom.py
 create mode 100644 pyload/plugins/accounts/DebridItaliaCom.py
 create mode 100644 pyload/plugins/accounts/DepositfilesCom.py
 create mode 100644 pyload/plugins/accounts/EasybytezCom.py
 create mode 100644 pyload/plugins/accounts/EgoFilesCom.py
 create mode 100644 pyload/plugins/accounts/EuroshareEu.py
 create mode 100644 pyload/plugins/accounts/FastshareCz.py
 create mode 100644 pyload/plugins/accounts/FilebeerInfo.py
 create mode 100644 pyload/plugins/accounts/FilecloudIo.py
 create mode 100644 pyload/plugins/accounts/FilefactoryCom.py
 create mode 100644 pyload/plugins/accounts/FilejungleCom.py
 create mode 100644 pyload/plugins/accounts/FilerioCom.py
 create mode 100644 pyload/plugins/accounts/FilesMailRu.py
 create mode 100644 pyload/plugins/accounts/FileserveCom.py
 create mode 100644 pyload/plugins/accounts/FourSharedCom.py
 create mode 100644 pyload/plugins/accounts/FreakshareCom.py
 create mode 100644 pyload/plugins/accounts/FshareVn.py
 create mode 100644 pyload/plugins/accounts/Ftp.py
 create mode 100644 pyload/plugins/accounts/HellshareCz.py
 create mode 100644 pyload/plugins/accounts/HellspyCz.py
 create mode 100644 pyload/plugins/accounts/HotfileCom.py
 create mode 100644 pyload/plugins/accounts/Http.py
 create mode 100644 pyload/plugins/accounts/MegasharesCom.py
 create mode 100644 pyload/plugins/accounts/MultiDebridCom.py
 create mode 100644 pyload/plugins/accounts/MultishareCz.py
 create mode 100755 pyload/plugins/accounts/NetloadIn.py
 create mode 100644 pyload/plugins/accounts/Premium4Me.py
 create mode 100644 pyload/plugins/accounts/PremiumizeMe.py
 create mode 100644 pyload/plugins/accounts/QuickshareCz.py
 create mode 100644 pyload/plugins/accounts/RapidgatorNet.py
 create mode 100644 pyload/plugins/accounts/RapidshareCom.py
 create mode 100644 pyload/plugins/accounts/RarefileNet.py
 create mode 100644 pyload/plugins/accounts/RealdebridCom.py
 create mode 100644 pyload/plugins/accounts/RehostTo.py
 create mode 100644 pyload/plugins/accounts/ReloadCc.py
 create mode 100644 pyload/plugins/accounts/RyushareCom.py
 create mode 100644 pyload/plugins/accounts/Share76Com.py
 create mode 100644 pyload/plugins/accounts/ShareFilesCo.py
 create mode 100644 pyload/plugins/accounts/ShareRapidCom.py
 create mode 100644 pyload/plugins/accounts/ShareonlineBiz.py
 create mode 100644 pyload/plugins/accounts/SpeedLoadOrg.py
 create mode 100644 pyload/plugins/accounts/StahnuTo.py
 create mode 100644 pyload/plugins/accounts/TurbobitNet.py
 create mode 100644 pyload/plugins/accounts/UlozTo.py
 create mode 100644 pyload/plugins/accounts/UploadedTo.py
 create mode 100644 pyload/plugins/accounts/UploadheroCom.py
 create mode 100644 pyload/plugins/accounts/UploadingCom.py
 create mode 100644 pyload/plugins/accounts/UploadstationCom.py
 create mode 100644 pyload/plugins/accounts/UptoboxCom.py
 create mode 100644 pyload/plugins/accounts/WarserverCz.py
 create mode 100644 pyload/plugins/accounts/WuploadCom.py
 create mode 100644 pyload/plugins/accounts/X7To.py
 create mode 100644 pyload/plugins/accounts/YibaishiwuCom.py
 create mode 100644 pyload/plugins/accounts/ZeveraCom.py
 create mode 100644 pyload/plugins/accounts/__init__.py
 create mode 100644 pyload/plugins/addons/AlldebridCom.py
 create mode 100644 pyload/plugins/addons/BypassCaptcha.py
 create mode 100644 pyload/plugins/addons/CaptchaBrotherhood.py
 create mode 100644 pyload/plugins/addons/CaptchaTrader.py
 create mode 100644 pyload/plugins/addons/Checksum.py
 create mode 100644 pyload/plugins/addons/ClickAndLoad.py
 create mode 100644 pyload/plugins/addons/DeathByCaptcha.py
 create mode 100644 pyload/plugins/addons/DebridItaliaCom.py
 create mode 100644 pyload/plugins/addons/DeleteFinished.py
 create mode 100644 pyload/plugins/addons/DownloadScheduler.py
 create mode 100644 pyload/plugins/addons/EasybytezCom.py
 create mode 100644 pyload/plugins/addons/Ev0InFetcher.py
 create mode 100644 pyload/plugins/addons/ExpertDecoders.py
 create mode 100644 pyload/plugins/addons/ExternalScripts.py
 create mode 100644 pyload/plugins/addons/ExtractArchive.py
 create mode 100644 pyload/plugins/addons/HotFolder.py
 create mode 100644 pyload/plugins/addons/IRCInterface.py
 create mode 100644 pyload/plugins/addons/ImageTyperz.py
 create mode 100644 pyload/plugins/addons/LinkdecrypterCom.py
 create mode 100644 pyload/plugins/addons/MergeFiles.py
 create mode 100644 pyload/plugins/addons/MultiDebridCom.py
 create mode 100644 pyload/plugins/addons/MultiHome.py
 create mode 100644 pyload/plugins/addons/MultiHoster.py
 create mode 100644 pyload/plugins/addons/MultishareCz.py
 create mode 100644 pyload/plugins/addons/Premium4Me.py
 create mode 100644 pyload/plugins/addons/PremiumizeMe.py
 create mode 100644 pyload/plugins/addons/RealdebridCom.py
 create mode 100644 pyload/plugins/addons/RehostTo.py
 create mode 100644 pyload/plugins/addons/RestartFailed.py
 create mode 100644 pyload/plugins/addons/UnSkipOnFail.py
 create mode 100644 pyload/plugins/addons/UpdateManager.py
 create mode 100644 pyload/plugins/addons/XFileSharingPro.py
 create mode 100644 pyload/plugins/addons/XMPPInterface.py
 create mode 100644 pyload/plugins/addons/ZeveraCom.py
 create mode 100644 pyload/plugins/addons/__init__.py
 create mode 100644 pyload/plugins/crypter/C1neonCom.py
 create mode 100644 pyload/plugins/crypter/CCF.py
 create mode 100644 pyload/plugins/crypter/CrockoComFolder.py
 create mode 100644 pyload/plugins/crypter/CryptItCom.py
 create mode 100644 pyload/plugins/crypter/CzshareComFolder.py
 create mode 100644 pyload/plugins/crypter/DDLMusicOrg.py
 create mode 100644 pyload/plugins/crypter/DataHuFolder.py
 create mode 100644 pyload/plugins/crypter/DdlstorageComFolder.py
 create mode 100644 pyload/plugins/crypter/DepositfilesComFolder.py
 create mode 100644 pyload/plugins/crypter/Dereferer.py
 create mode 100644 pyload/plugins/crypter/DontKnowMe.py
 create mode 100644 pyload/plugins/crypter/DownloadVimeoCom.py
 create mode 100644 pyload/plugins/crypter/DuckCryptInfo.py
 create mode 100644 pyload/plugins/crypter/EasybytezComFolder.py
 create mode 100644 pyload/plugins/crypter/EmbeduploadCom.py
 create mode 100644 pyload/plugins/crypter/FilebeerInfoFolder.py
 create mode 100644 pyload/plugins/crypter/FilefactoryComFolder.py
 create mode 100644 pyload/plugins/crypter/FileserveComFolder.py
 create mode 100644 pyload/plugins/crypter/FourChanOrg.py
 create mode 100644 pyload/plugins/crypter/FshareVnFolder.py
 create mode 100644 pyload/plugins/crypter/GooGl.py
 create mode 100644 pyload/plugins/crypter/HoerbuchIn.py
 create mode 100644 pyload/plugins/crypter/HotfileFolderCom.py
 create mode 100644 pyload/plugins/crypter/ILoadTo.py
 create mode 100644 pyload/plugins/crypter/LetitbitNetFolder.py
 create mode 100644 pyload/plugins/crypter/LinkList.py
 create mode 100644 pyload/plugins/crypter/LinkSaveIn.py
 create mode 100644 pyload/plugins/crypter/LinkdecrypterCom.py
 create mode 100644 pyload/plugins/crypter/LixIn.py
 create mode 100644 pyload/plugins/crypter/LofCc.py
 create mode 100644 pyload/plugins/crypter/MBLinkInfo.py
 create mode 100644 pyload/plugins/crypter/MediafireComFolder.py
 create mode 100644 pyload/plugins/crypter/Movie2kTo.py
 create mode 100644 pyload/plugins/crypter/MultiloadCz.py
 create mode 100644 pyload/plugins/crypter/MultiuploadCom.py
 create mode 100644 pyload/plugins/crypter/NCryptIn.py
 create mode 100644 pyload/plugins/crypter/NetfolderIn.py
 create mode 100644 pyload/plugins/crypter/OneKhDe.py
 create mode 100755 pyload/plugins/crypter/OronComFolder.py
 create mode 100644 pyload/plugins/crypter/QuickshareCzFolder.py
 create mode 100644 pyload/plugins/crypter/RSDF.py
 create mode 100644 pyload/plugins/crypter/RSLayerCom.py
 create mode 100644 pyload/plugins/crypter/RelinkUs.py
 create mode 100644 pyload/plugins/crypter/SecuredIn.py
 create mode 100644 pyload/plugins/crypter/SerienjunkiesOrg.py
 create mode 100644 pyload/plugins/crypter/ShareLinksBiz.py
 create mode 100644 pyload/plugins/crypter/ShareRapidComFolder.py
 create mode 100644 pyload/plugins/crypter/SpeedLoadOrgFolder.py
 create mode 100644 pyload/plugins/crypter/StealthTo.py
 create mode 100644 pyload/plugins/crypter/TrailerzoneInfo.py
 create mode 100644 pyload/plugins/crypter/UlozToFolder.py
 create mode 100644 pyload/plugins/crypter/UploadedToFolder.py
 create mode 100644 pyload/plugins/crypter/WiiReloadedOrg.py
 create mode 100644 pyload/plugins/crypter/XfilesharingProFolder.py
 create mode 100644 pyload/plugins/crypter/YoutubeBatch.py
 create mode 100644 pyload/plugins/crypter/__init__.py
 create mode 100755 pyload/plugins/hooks/Captcha9kw.py
 create mode 100644 pyload/plugins/hooks/ReloadCc.py
 create mode 100644 pyload/plugins/hoster/ARD.py
 create mode 100644 pyload/plugins/hoster/AlldebridCom.py
 create mode 100644 pyload/plugins/hoster/BasePlugin.py
 create mode 100644 pyload/plugins/hoster/BayfilesCom.py
 create mode 100644 pyload/plugins/hoster/BezvadataCz.py
 create mode 100644 pyload/plugins/hoster/BillionuploadsCom.py
 create mode 100644 pyload/plugins/hoster/BitshareCom.py
 create mode 100644 pyload/plugins/hoster/BoltsharingCom.py
 create mode 100644 pyload/plugins/hoster/CatShareNet.py
 create mode 100644 pyload/plugins/hoster/ChipDe.py
 create mode 100644 pyload/plugins/hoster/CloudzerNet.py
 create mode 100644 pyload/plugins/hoster/CramitIn.py
 create mode 100644 pyload/plugins/hoster/CrockoCom.py
 create mode 100644 pyload/plugins/hoster/CyberlockerCh.py
 create mode 100644 pyload/plugins/hoster/CzshareCom.py
 create mode 100644 pyload/plugins/hoster/DailymotionCom.py
 create mode 100644 pyload/plugins/hoster/DataHu.py
 create mode 100644 pyload/plugins/hoster/DataportCz.py
 create mode 100644 pyload/plugins/hoster/DateiTo.py
 create mode 100644 pyload/plugins/hoster/DdlstorageCom.py
 create mode 100644 pyload/plugins/hoster/DebridItaliaCom.py
 create mode 100644 pyload/plugins/hoster/DepositfilesCom.py
 create mode 100644 pyload/plugins/hoster/DlFreeFr.py
 create mode 100644 pyload/plugins/hoster/EasybytezCom.py
 create mode 100644 pyload/plugins/hoster/EdiskCz.py
 create mode 100644 pyload/plugins/hoster/EgoFilesCom.py
 create mode 100644 pyload/plugins/hoster/EuroshareEu.py
 create mode 100644 pyload/plugins/hoster/ExtabitCom.py
 create mode 100644 pyload/plugins/hoster/FastshareCz.py
 create mode 100644 pyload/plugins/hoster/FileApeCom.py
 create mode 100644 pyload/plugins/hoster/FilebeerInfo.py
 create mode 100644 pyload/plugins/hoster/FilecloudIo.py
 create mode 100644 pyload/plugins/hoster/FilefactoryCom.py
 create mode 100644 pyload/plugins/hoster/FilejungleCom.py
 create mode 100644 pyload/plugins/hoster/FilepostCom.py
 create mode 100644 pyload/plugins/hoster/FilerNet.py
 create mode 100644 pyload/plugins/hoster/FilerioCom.py
 create mode 100644 pyload/plugins/hoster/FilesMailRu.py
 create mode 100644 pyload/plugins/hoster/FileserveCom.py
 create mode 100644 pyload/plugins/hoster/FileshareInUa.py
 create mode 100644 pyload/plugins/hoster/FlyFilesNet.py
 create mode 100644 pyload/plugins/hoster/FourSharedCom.py
 create mode 100644 pyload/plugins/hoster/FreakshareCom.py
 create mode 100644 pyload/plugins/hoster/FreevideoCz.py
 create mode 100644 pyload/plugins/hoster/FshareVn.py
 create mode 100644 pyload/plugins/hoster/Ftp.py
 create mode 100644 pyload/plugins/hoster/GamefrontCom.py
 create mode 100644 pyload/plugins/hoster/GigapetaCom.py
 create mode 100644 pyload/plugins/hoster/HellshareCz.py
 create mode 100644 pyload/plugins/hoster/HellspyCz.py
 create mode 100644 pyload/plugins/hoster/HotfileCom.py
 create mode 100644 pyload/plugins/hoster/IFileWs.py
 create mode 100644 pyload/plugins/hoster/IcyFilesCom.py
 create mode 100644 pyload/plugins/hoster/IfileIt.py
 create mode 100644 pyload/plugins/hoster/IfolderRu.py
 create mode 100644 pyload/plugins/hoster/JumbofilesCom.py
 create mode 100644 pyload/plugins/hoster/LetitbitNet.py
 create mode 100644 pyload/plugins/hoster/LoadTo.py
 create mode 100644 pyload/plugins/hoster/LuckyShareNet.py
 create mode 100644 pyload/plugins/hoster/MediafireCom.py
 create mode 100644 pyload/plugins/hoster/MegaNz.py
 create mode 100644 pyload/plugins/hoster/MegasharesCom.py
 create mode 100644 pyload/plugins/hoster/MovReelCom.py
 create mode 100644 pyload/plugins/hoster/MultiDebridCom.py
 create mode 100644 pyload/plugins/hoster/MultishareCz.py
 create mode 100644 pyload/plugins/hoster/MyvideoDe.py
 create mode 100644 pyload/plugins/hoster/NarodRu.py
 create mode 100644 pyload/plugins/hoster/NetloadIn.py
 create mode 100644 pyload/plugins/hoster/NovafileCom.py
 create mode 100644 pyload/plugins/hoster/NowDownloadEu.py
 create mode 100644 pyload/plugins/hoster/OneFichierCom.py
 create mode 100644 pyload/plugins/hoster/PornhostCom.py
 create mode 100644 pyload/plugins/hoster/PornhubCom.py
 create mode 100644 pyload/plugins/hoster/Premium4Me.py
 create mode 100644 pyload/plugins/hoster/PremiumizeMe.py
 create mode 100644 pyload/plugins/hoster/PutlockerCom.py
 create mode 100644 pyload/plugins/hoster/QuickshareCz.py
 create mode 100644 pyload/plugins/hoster/RapidgatorNet.py
 create mode 100644 pyload/plugins/hoster/RapidshareCom.py
 create mode 100644 pyload/plugins/hoster/RarefileNet.py
 create mode 100644 pyload/plugins/hoster/RealdebridCom.py
 create mode 100644 pyload/plugins/hoster/RedtubeCom.py
 create mode 100644 pyload/plugins/hoster/RehostTo.py
 create mode 100644 pyload/plugins/hoster/ReloadCc.py
 create mode 100644 pyload/plugins/hoster/RyushareCom.py
 create mode 100644 pyload/plugins/hoster/SecureUploadEu.py
 create mode 100644 pyload/plugins/hoster/SendmywayCom.py
 create mode 100644 pyload/plugins/hoster/SendspaceCom.py
 create mode 100644 pyload/plugins/hoster/Share4webCom.py
 create mode 100644 pyload/plugins/hoster/Share76Com.py
 create mode 100644 pyload/plugins/hoster/ShareFilesCo.py
 create mode 100644 pyload/plugins/hoster/ShareRapidCom.py
 create mode 100644 pyload/plugins/hoster/SharebeesCom.py
 create mode 100644 pyload/plugins/hoster/ShareonlineBiz.py
 create mode 100644 pyload/plugins/hoster/ShareplaceCom.py
 create mode 100644 pyload/plugins/hoster/ShragleCom.py
 create mode 100644 pyload/plugins/hoster/SpeedLoadOrg.py
 create mode 100644 pyload/plugins/hoster/SpeedfileCz.py
 create mode 100644 pyload/plugins/hoster/StreamCz.py
 create mode 100644 pyload/plugins/hoster/TurbobitNet.py
 create mode 100644 pyload/plugins/hoster/TurbouploadCom.py
 create mode 100644 pyload/plugins/hoster/TusfilesNet.py
 create mode 100644 pyload/plugins/hoster/TwoSharedCom.py
 create mode 100644 pyload/plugins/hoster/UlozTo.py
 create mode 100644 pyload/plugins/hoster/UloziskoSk.py
 create mode 100644 pyload/plugins/hoster/UnibytesCom.py
 create mode 100644 pyload/plugins/hoster/UploadStationCom.py
 create mode 100644 pyload/plugins/hoster/UploadedTo.py
 create mode 100644 pyload/plugins/hoster/UploadheroCom.py
 create mode 100644 pyload/plugins/hoster/UploadingCom.py
 create mode 100644 pyload/plugins/hoster/UptoboxCom.py
 create mode 100644 pyload/plugins/hoster/VeehdCom.py
 create mode 100644 pyload/plugins/hoster/WarserverCz.py
 create mode 100644 pyload/plugins/hoster/WebshareCz.py
 create mode 100644 pyload/plugins/hoster/WrzucTo.py
 create mode 100644 pyload/plugins/hoster/WuploadCom.py
 create mode 100644 pyload/plugins/hoster/X7To.py
 create mode 100644 pyload/plugins/hoster/XFileSharingPro.py
 create mode 100644 pyload/plugins/hoster/XHamsterCom.py
 create mode 100644 pyload/plugins/hoster/XVideosCom.py
 create mode 100644 pyload/plugins/hoster/Xdcc.py
 create mode 100644 pyload/plugins/hoster/XvidstageCom.py
 create mode 100644 pyload/plugins/hoster/YibaishiwuCom.py
 create mode 100644 pyload/plugins/hoster/YoupornCom.py
 create mode 100644 pyload/plugins/hoster/YourfilesTo.py
 create mode 100644 pyload/plugins/hoster/YoutubeCom.py
 create mode 100644 pyload/plugins/hoster/ZDF.py
 create mode 100644 pyload/plugins/hoster/ZeveraCom.py
 create mode 100644 pyload/plugins/hoster/ZippyshareCom.py
 create mode 100644 pyload/plugins/hoster/__init__.py
 create mode 100644 pyload/plugins/internal/AbstractExtractor.py
 create mode 100644 pyload/plugins/internal/CaptchaService.py
 create mode 100644 pyload/plugins/internal/DeadHoster.py
 create mode 100644 pyload/plugins/internal/NetloadInOCR.py
 create mode 100644 pyload/plugins/internal/OCR.py
 create mode 100644 pyload/plugins/internal/ShareonlineBizOCR.py
 create mode 100644 pyload/plugins/internal/SimpleCrypter.py
 create mode 100644 pyload/plugins/internal/SimpleHoster.py
 create mode 100644 pyload/plugins/internal/UnRar.py
 create mode 100644 pyload/plugins/internal/UnZip.py
 create mode 100644 pyload/plugins/internal/XFSPAccount.py
 create mode 100644 pyload/plugins/internal/__init__.py
 create mode 100644 pyload/remote/ClickAndLoadBackend.py
 create mode 100644 pyload/remote/JSONClient.py
 create mode 100644 pyload/remote/RemoteManager.py
 create mode 100644 pyload/remote/WSClient.py
 create mode 100644 pyload/remote/WebSocketBackend.py
 create mode 100644 pyload/remote/__init__.py
 create mode 100644 pyload/remote/apitypes.py
 create mode 100644 pyload/remote/apitypes_debug.py
 create mode 100644 pyload/remote/create_apitypes.py
 create mode 100644 pyload/remote/create_jstypes.py
 create mode 100644 pyload/remote/json_converter.py
 create mode 100644 pyload/remote/pyload.thrift
 create mode 100644 pyload/remote/ttypes.py
 create mode 100644 pyload/remote/wsbackend/AbstractHandler.py
 create mode 100644 pyload/remote/wsbackend/ApiHandler.py
 create mode 100644 pyload/remote/wsbackend/AsyncHandler.py
 create mode 100644 pyload/remote/wsbackend/Dispatcher.py
 create mode 100644 pyload/remote/wsbackend/Server.py
 create mode 100644 pyload/remote/wsbackend/__init__.py
 create mode 100644 pyload/setup/System_Checks.py
 create mode 100644 pyload/threads/AddonThread.py
 create mode 100644 pyload/threads/BaseThread.py
 create mode 100644 pyload/threads/DecrypterThread.py
 create mode 100644 pyload/threads/DownloadThread.py
 create mode 100644 pyload/threads/InfoThread.py
 create mode 100644 pyload/threads/ThreadManager.py
 create mode 100644 pyload/threads/__init__.py
 create mode 100644 pyload/utils/ImportDebugger.py
 create mode 100644 pyload/utils/JsEngine.py
 create mode 100644 pyload/utils/__init__.py
 create mode 100644 pyload/utils/fs.py
 create mode 100644 pyload/utils/json_layer.py
 create mode 100644 pyload/utils/packagetools.py
 create mode 100644 pyload/utils/pylgettext.py
 create mode 100644 pyload/web/.bowerrc
 create mode 100644 pyload/web/.jshintrc
 create mode 100644 pyload/web/Gruntfile.js
 create mode 100644 pyload/web/ServerThread.py
 create mode 100644 pyload/web/__init__.py
 create mode 100644 pyload/web/api_app.py
 create mode 100644 pyload/web/app/favicon.ico
 create mode 100644 pyload/web/app/fonts/Sansation_Bold-webfont.eot
 create mode 100644 pyload/web/app/fonts/Sansation_Bold-webfont.ttf
 create mode 100644 pyload/web/app/fonts/Sansation_Bold-webfont.woff
 create mode 100644 pyload/web/app/fonts/Sansation_Light-webfont.eot
 create mode 100644 pyload/web/app/fonts/Sansation_Light-webfont.ttf
 create mode 100644 pyload/web/app/fonts/Sansation_Light-webfont.woff
 create mode 100644 pyload/web/app/fonts/Sansation_Regular-webfont.eot
 create mode 100644 pyload/web/app/fonts/Sansation_Regular-webfont.ttf
 create mode 100644 pyload/web/app/fonts/Sansation_Regular-webfont.woff
 create mode 100644 pyload/web/app/images/default/bgpattern.png
 create mode 100644 pyload/web/app/images/default/checks_sheet.png
 create mode 100644 pyload/web/app/images/default/fancy_deboss.png
 create mode 100644 pyload/web/app/images/default/logo.png
 create mode 100644 pyload/web/app/images/default/logo_grey.png
 create mode 100644 pyload/web/app/images/icon.png
 create mode 100644 pyload/web/app/index.html
 create mode 100644 pyload/web/app/scripts/app.js
 create mode 100644 pyload/web/app/scripts/collections/AccountList.js
 create mode 100644 pyload/web/app/scripts/collections/FileList.js
 create mode 100644 pyload/web/app/scripts/collections/InteractionList.js
 create mode 100644 pyload/web/app/scripts/collections/PackageList.js
 create mode 100644 pyload/web/app/scripts/collections/ProgressList.js
 create mode 100644 pyload/web/app/scripts/config.js
 create mode 100644 pyload/web/app/scripts/controller.js
 create mode 100644 pyload/web/app/scripts/default.js
 create mode 100644 pyload/web/app/scripts/helpers/fileHelper.js
 create mode 100644 pyload/web/app/scripts/helpers/formatSize.js
 create mode 100644 pyload/web/app/scripts/helpers/formatTime.js
 create mode 100644 pyload/web/app/scripts/helpers/pluginIcon.js
 create mode 100644 pyload/web/app/scripts/models/Account.js
 create mode 100644 pyload/web/app/scripts/models/ConfigHolder.js
 create mode 100644 pyload/web/app/scripts/models/ConfigItem.js
 create mode 100644 pyload/web/app/scripts/models/File.js
 create mode 100644 pyload/web/app/scripts/models/InteractionTask.js
 create mode 100644 pyload/web/app/scripts/models/Package.js
 create mode 100644 pyload/web/app/scripts/models/Progress.js
 create mode 100644 pyload/web/app/scripts/models/ServerStatus.js
 create mode 100644 pyload/web/app/scripts/models/TreeCollection.js
 create mode 100644 pyload/web/app/scripts/models/UserSession.js
 create mode 100644 pyload/web/app/scripts/router.js
 create mode 100644 pyload/web/app/scripts/routers/defaultRouter.js
 create mode 100644 pyload/web/app/scripts/routers/mobileRouter.js
 create mode 100644 pyload/web/app/scripts/utils/animations.js
 create mode 100644 pyload/web/app/scripts/utils/apitypes.js
 create mode 100644 pyload/web/app/scripts/utils/dialogs.js
 create mode 100644 pyload/web/app/scripts/utils/initHB.js
 create mode 100644 pyload/web/app/scripts/utils/lazyRequire.js
 create mode 100644 pyload/web/app/scripts/vendor/Handlebars-1.0rc1.js
 create mode 100755 pyload/web/app/scripts/vendor/bootstrap-2.3.2.js
 create mode 100644 pyload/web/app/scripts/vendor/jquery.omniwindow.js
 create mode 100644 pyload/web/app/scripts/vendor/remaining.js
 create mode 100644 pyload/web/app/scripts/views/abstract/itemView.js
 create mode 100644 pyload/web/app/scripts/views/abstract/modalView.js
 create mode 100644 pyload/web/app/scripts/views/accounts/accountListView.js
 create mode 100644 pyload/web/app/scripts/views/accounts/accountModal.js
 create mode 100644 pyload/web/app/scripts/views/accounts/accountView.js
 create mode 100644 pyload/web/app/scripts/views/dashboard/dashboardView.js
 create mode 100644 pyload/web/app/scripts/views/dashboard/fileView.js
 create mode 100644 pyload/web/app/scripts/views/dashboard/filterView.js
 create mode 100644 pyload/web/app/scripts/views/dashboard/packageView.js
 create mode 100644 pyload/web/app/scripts/views/dashboard/selectionView.js
 create mode 100644 pyload/web/app/scripts/views/headerView.js
 create mode 100644 pyload/web/app/scripts/views/input/inputLoader.js
 create mode 100644 pyload/web/app/scripts/views/input/inputView.js
 create mode 100644 pyload/web/app/scripts/views/input/textInput.js
 create mode 100644 pyload/web/app/scripts/views/linkGrabberModal.js
 create mode 100644 pyload/web/app/scripts/views/loginView.js
 create mode 100644 pyload/web/app/scripts/views/notificationView.js
 create mode 100644 pyload/web/app/scripts/views/progressView.js
 create mode 100644 pyload/web/app/scripts/views/queryModal.js
 create mode 100644 pyload/web/app/scripts/views/settings/configSectionView.js
 create mode 100644 pyload/web/app/scripts/views/settings/pluginChooserModal.js
 create mode 100644 pyload/web/app/scripts/views/settings/settingsView.js
 create mode 100644 pyload/web/app/styles/default/accounts.less
 create mode 100644 pyload/web/app/styles/default/admin.less
 create mode 100644 pyload/web/app/styles/default/base.less
 create mode 100644 pyload/web/app/styles/default/common.less
 create mode 100644 pyload/web/app/styles/default/dashboard.less
 create mode 100644 pyload/web/app/styles/default/main.less
 create mode 100644 pyload/web/app/styles/default/settings.less
 create mode 100644 pyload/web/app/styles/default/style.less
 create mode 100644 pyload/web/app/styles/font.css
 create mode 100644 pyload/web/app/templates/default/accounts/account.html
 create mode 100644 pyload/web/app/templates/default/accounts/actionbar.html
 create mode 100644 pyload/web/app/templates/default/accounts/layout.html
 create mode 100644 pyload/web/app/templates/default/admin.html
 create mode 100644 pyload/web/app/templates/default/dashboard/actionbar.html
 create mode 100644 pyload/web/app/templates/default/dashboard/file.html
 create mode 100644 pyload/web/app/templates/default/dashboard/layout.html
 create mode 100644 pyload/web/app/templates/default/dashboard/package.html
 create mode 100644 pyload/web/app/templates/default/dashboard/select.html
 create mode 100755 pyload/web/app/templates/default/dialogs/addAccount.html
 create mode 100755 pyload/web/app/templates/default/dialogs/addPluginConfig.html
 create mode 100644 pyload/web/app/templates/default/dialogs/confirmDelete.html
 create mode 100755 pyload/web/app/templates/default/dialogs/interactionTask.html
 create mode 100755 pyload/web/app/templates/default/dialogs/linkgrabber.html
 create mode 100755 pyload/web/app/templates/default/dialogs/modal.html
 create mode 100644 pyload/web/app/templates/default/header/layout.html
 create mode 100644 pyload/web/app/templates/default/header/progress.html
 create mode 100644 pyload/web/app/templates/default/header/progressbar.html
 create mode 100644 pyload/web/app/templates/default/header/status.html
 create mode 100644 pyload/web/app/templates/default/login.html
 create mode 100644 pyload/web/app/templates/default/notification.html
 create mode 100644 pyload/web/app/templates/default/settings/actionbar.html
 create mode 100644 pyload/web/app/templates/default/settings/config.html
 create mode 100644 pyload/web/app/templates/default/settings/configItem.html
 create mode 100644 pyload/web/app/templates/default/settings/layout.html
 create mode 100644 pyload/web/app/templates/default/settings/menu.html
 create mode 100644 pyload/web/app/templates/default/setup.html
 create mode 100644 pyload/web/app/unavailable.html
 create mode 100644 pyload/web/bower.json
 create mode 100644 pyload/web/cnl_app.py
 create mode 100644 pyload/web/middlewares.py
 create mode 100644 pyload/web/package.json
 create mode 100644 pyload/web/pyload_app.py
 create mode 100644 pyload/web/servers.py
 create mode 100644 pyload/web/setup_app.py
 create mode 100644 pyload/web/utils.py
 create mode 100644 pyload/web/webinterface.py
(limited to 'pyload')

diff --git a/pyload/AccountManager.py b/pyload/AccountManager.py
new file mode 100644
index 000000000..a476c75c1
--- /dev/null
+++ b/pyload/AccountManager.py
@@ -0,0 +1,141 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+###############################################################################
+# Copyright(c) 2008-2012 pyLoad Team
+# http://www.pyload.org
+#
+# This file is part of pyLoad.
+# pyLoad is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# Subjected to the terms and conditions in LICENSE
+#
+# @author: RaNaN, mkaay
+###############################################################################
+
+from threading import Lock
+from random import choice
+
+from pyload.utils import lock, json
+
+class AccountManager:
+    """manages all accounts"""
+
+    def __init__(self, core):
+        """Constructor"""
+
+        self.core = core
+        self.lock = Lock()
+
+        self.loadAccounts()
+
+    def loadAccounts(self):
+        """loads all accounts available"""
+
+        self.accounts = {}
+
+        for plugin, loginname, activated, password, options in self.core.db.loadAccounts():
+            # put into options as used in other context
+            options = json.loads(options) if options else {}
+            options["activated"] = activated
+
+            self.createAccount(plugin, loginname, password, options)
+
+
+    def iterAccounts(self):
+        """ yields login, account  for all accounts"""
+        for name, data in self.accounts.iteritems():
+            for login, account in data.iteritems():
+                yield login, account
+
+    def saveAccounts(self):
+        """save all account information"""
+        # TODO: multi user
+        # TODO: activated
+
+        data = []
+        for name, plugin in self.accounts.iteritems():
+            data.extend(
+                [(name, acc.loginname, 1 if acc.activated else 0, acc.password, json.dumps(acc.options)) for acc in
+                 plugin.itervalues()])
+        self.core.db.saveAccounts(data)
+
+    def createAccount(self, plugin, loginname, password, options):
+        klass = self.core.pluginManager.loadClass("accounts", plugin)
+        if not klass:
+            self.core.log.warning(_("Unknown account plugin %s") % plugin)
+            return
+
+        if plugin not in self.accounts:
+            self.accounts[plugin] = {}
+
+        self.core.log.debug("Create account %s:%s" % (plugin, loginname))
+
+        self.accounts[plugin][loginname] = klass(self, loginname, password, options)
+
+
+    def getAccount(self, plugin, user):
+        return self.accounts[plugin].get(user, None)
+
+    @lock
+    def updateAccount(self, plugin, user, password=None, options={}):
+        """add or update account"""
+        if plugin in self.accounts and user in self.accounts[plugin]:
+            acc = self.accounts[plugin][user]
+            updated = acc.update(password, options)
+
+            self.saveAccounts()
+            if updated: acc.scheduleRefresh(force=True)
+        else:
+            self.createAccount(plugin, user, password, options)
+            self.saveAccounts()
+
+        self.sendChange(plugin, user)
+
+    @lock
+    def removeAccount(self, plugin, user):
+        """remove account"""
+        if plugin in self.accounts and user in self.accounts[plugin]:
+            del self.accounts[plugin][user]
+            self.core.db.removeAccount(plugin, user)
+            self.core.eventManager.dispatchEvent("account:deleted", plugin, user)
+        else:
+            self.core.log.debug("Remove non existent account %s %s" % (plugin, user))
+
+
+    @lock
+    def getAccountForPlugin(self, plugin):
+        if plugin in self.accounts:
+            accs = [x for x in self.accounts[plugin].values() if x.isUsable()]
+            if accs: return choice(accs)
+
+        return None
+
+    @lock
+    def getAllAccounts(self, refresh=False):
+        """ Return account info, refresh afterwards if needed
+
+        :param refresh:
+        :return:
+        """
+        if refresh:
+            self.core.scheduler.addJob(0, self.core.accountManager.getAllAccounts)
+
+        # load unavailable account info
+        for p_dict in self.accounts.itervalues():
+            for acc in p_dict.itervalues():
+                acc.getAccountInfo()
+
+        return self.accounts
+
+    def refreshAllAccounts(self):
+        """ Force a refresh of every account """
+        for p in self.accounts.itervalues():
+            for acc in p.itervalues():
+                acc.getAccountInfo(True)
+
+    def sendChange(self, plugin, name):
+        self.core.eventManager.dispatchEvent("account:updated", plugin, name)
\ No newline at end of file
diff --git a/pyload/AddonManager.py b/pyload/AddonManager.py
new file mode 100644
index 000000000..5637b1a8e
--- /dev/null
+++ b/pyload/AddonManager.py
@@ -0,0 +1,248 @@
+# -*- coding: utf-8 -*-
+
+###############################################################################
+# Copyright(c) 2008-2013 pyLoad Team
+# http://www.pyload.org
+#
+# This file is part of pyLoad.
+# pyLoad is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# Subjected to the terms and conditions in LICENSE
+#
+# @author: RaNaN
+###############################################################################
+
+import __builtin__
+
+from thread import start_new_thread
+from threading import RLock
+
+from types import MethodType
+
+from pyload.threads.AddonThread import AddonThread
+from pyload.PluginManager import literal_eval
+from utils import lock, to_string
+
+class AddonManager:
+    """ Manages addons, loading, unloading. """
+
+    def __init__(self, core):
+        self.core = core
+        self.config = self.core.config
+
+        __builtin__.addonManager = self #needed to let addons register themselves
+
+        self.log = self.core.log
+        # TODO: multiuser, addons can store the user itself, probably not needed here
+        self.plugins = {}
+        self.methods = {} # dict of names and list of methods usable by rpc
+        self.events = {} # Contains event that will be registered
+
+        self.lock = RLock()
+        self.createIndex()
+
+        # manage addons on config change
+        self.addEvent("config:changed", self.manageAddons)
+
+    @lock
+    def callInHooks(self, event, eventName, *args):
+        """  Calls a method in all addons and catch / log errors"""
+        for plugin in self.plugins.itervalues():
+            self.call(plugin, event, *args)
+        self.dispatchEvent(eventName, *args)
+
+    def call(self, addon, f, *args):
+        try:
+            func = getattr(addon, f)
+            return func(*args)
+        except Exception, e:
+            addon.logError(_("Error when executing %s" % f), e)
+            self.core.print_exc()
+
+    def addRPC(self, plugin, func, doc):
+        doc = doc.strip() if doc else ""
+
+        if plugin in self.methods:
+            self.methods[plugin][func] = doc
+        else:
+            self.methods[plugin] = {func: doc}
+
+    def callRPC(self, plugin, func, args):
+        if not args: args = []
+        else:
+            args = literal_eval(args)
+
+        plugin = self.plugins[plugin]
+        f = getattr(plugin, func)
+        return f(*args)
+
+    @lock
+    def createIndex(self):
+        active = []
+        deactive = []
+
+        for pluginname in self.core.pluginManager.getPlugins("addons"):
+            try:
+                # check first for builtin plugin
+                attrs = self.core.pluginManager.loadAttributes("addons", pluginname)
+                internal = attrs.get("internal", False)
+
+                if internal or self.core.config.get(pluginname, "activated"):
+                    pluginClass = self.core.pluginManager.loadClass("addons", pluginname)
+
+                    if not pluginClass: continue
+
+                    plugin = pluginClass(self.core, self)
+                    self.plugins[pluginClass.__name__] = plugin
+
+                    # hide internals from printing
+                    if not internal and plugin.isActivated():
+                        active.append(pluginClass.__name__)
+                    else:
+                        self.log.debug("Loaded internal plugin: %s" % pluginClass.__name__)
+                else:
+                    deactive.append(pluginname)
+
+
+            except:
+                self.log.warning(_("Failed activating %(name)s") % {"name": pluginname})
+                self.core.print_exc()
+
+        self.log.info(_("Activated addons: %s") % ", ".join(sorted(active)))
+        self.log.info(_("Deactivate addons: %s") % ", ".join(sorted(deactive)))
+
+    def 
manageAddons(self, plugin, name, value): + # TODO: user + + # check if section was a plugin + if plugin not in self.core.pluginManager.getPlugins("addons"): + return + + if name == "activated" and value: + self.activateAddon(plugin) + elif name == "activated" and not value: + self.deactivateAddon(plugin) + + @lock + def activateAddon(self, plugin): + #check if already loaded + if plugin in self.plugins: + return + + pluginClass = self.core.pluginManager.loadClass("addons", plugin) + + if not pluginClass: return + + self.log.debug("Plugin loaded: %s" % plugin) + + plugin = pluginClass(self.core, self) + self.plugins[pluginClass.__name__] = plugin + + # active the addon in new thread + start_new_thread(plugin.activate, tuple()) + self.registerEvents() # TODO: BUG: events will be destroyed and not re-registered + + @lock + def deactivateAddon(self, plugin): + if plugin not in self.plugins: + return + else: + addon = self.plugins[plugin] + + if addon.__internal__: return + + self.call(addon, "deactivate") + self.log.debug("Plugin deactivated: %s" % plugin) + + #remove periodic call + self.log.debug("Removed callback %s" % self.core.scheduler.removeJob(addon.cb)) + del self.plugins[addon.__name__] + + #remove event listener + for f in dir(addon): + if f.startswith("__") or type(getattr(addon, f)) != MethodType: + continue + self.core.eventManager.removeFromEvents(getattr(addon, f)) + + def activateAddons(self): + self.log.info(_("Activating Plugins...")) + for plugin in self.plugins.itervalues(): + if plugin.isActivated(): + self.call(plugin, "activate") + + self.registerEvents() + + def deactivateAddons(self): + """ Called when core is shutting down """ + self.log.info(_("Deactivating Plugins...")) + for plugin in self.plugins.itervalues(): + self.call(plugin, "deactivate") + + def downloadPreparing(self, pyfile): + self.callInHooks("downloadPreparing", "download:preparing", pyfile) + + def downloadFinished(self, pyfile): + self.callInHooks("downloadFinished", "download:finished", pyfile) + + def downloadFailed(self, pyfile): + self.callInHooks("downloadFailed", "download:failed", pyfile) + + def packageFinished(self, package): + self.callInHooks("packageFinished", "package:finished", package) + + def beforeReconnecting(self, ip): + self.callInHooks("beforeReconnecting", "reconnecting:before", ip) + + def afterReconnecting(self, ip): + self.callInHooks("afterReconnecting", "reconnecting:after", ip) + + @lock + def startThread(self, function, *args, **kwargs): + AddonThread(self.core.threadManager, function, args, kwargs) + + def activePlugins(self): + """ returns all active plugins """ + return [x for x in self.plugins.itervalues() if x.isActivated()] + + def getAllInfo(self): + """returns info stored by addon plugins""" + info = {} + for name, plugin in self.plugins.iteritems(): + if plugin.info: + #copy and convert so str + info[name] = dict( + [(x, to_string(y)) for x, y in plugin.info.iteritems()]) + return info + + def getInfo(self, plugin): + info = {} + if plugin in self.plugins and self.plugins[plugin].info: + info = dict([(x, to_string(y)) + for x, y in self.plugins[plugin].info.iteritems()]) + + return info + + def addEventListener(self, plugin, func, event): + """ add the event to the list """ + if plugin not in self.events: + self.events[plugin] = [] + self.events[plugin].append((func, event)) + + def registerEvents(self): + """ actually register all saved events """ + for name, plugin in self.plugins.iteritems(): + if name in self.events: + for func, event in self.events[name]: + 
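+                    # func was stored as a method name by addEventListener; bind it to the live plugin instance before registering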
self.addEvent(event, getattr(plugin, func))
+                # clean up
+                del self.events[name]
+
+    def addEvent(self, *args):
+        self.core.eventManager.addEvent(*args)
+
+    def dispatchEvent(self, *args):
+        self.core.eventManager.dispatchEvent(*args)
+
diff --git a/pyload/Api.py b/pyload/Api.py
new file mode 100644
index 000000000..32a077c08
--- /dev/null
+++ b/pyload/Api.py
@@ -0,0 +1,206 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+###############################################################################
+# Copyright(c) 2008-2013 pyLoad Team
+# http://www.pyload.org
+#
+# This file is part of pyLoad.
+# pyLoad is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# Subjected to the terms and conditions in LICENSE
+#
+# @author: RaNaN
+###############################################################################
+
+import re
+from types import MethodType
+
+from remote.apitypes import *
+
+# contains function names mapped to their permissions
+# unlisted functions are for admins only
+perm_map = {}
+
+# decorator only applied at import time, never instantiated, so it has no effect at runtime
+def RequirePerm(bits):
+    class _Dec(object):
+        def __new__(cls, func, *args, **kwargs):
+            perm_map[func.__name__] = bits
+            return func
+
+    return _Dec
+
+urlmatcher = re.compile(r"((https?|ftps?|xdcc|sftp):((//)|(\\\\))+[\w\d:#@%/;$()~_?\+\-=\\\.&]*)", re.IGNORECASE)
+
+stateMap = {
+    DownloadState.All: frozenset(getattr(DownloadStatus, x) for x in dir(DownloadStatus) if not x.startswith("_")),
+    DownloadState.Finished: frozenset((DownloadStatus.Finished, DownloadStatus.Skipped)),
+    DownloadState.Unfinished: None,  # set below
+    DownloadState.Failed: frozenset((DownloadStatus.Failed, DownloadStatus.TempOffline, DownloadStatus.Aborted)),
+    DownloadState.Unmanaged: None,  # TODO
+}
+
+stateMap[DownloadState.Unfinished] = frozenset(stateMap[DownloadState.All].difference(stateMap[DownloadState.Finished]))
+
+def state_string(state):
+    return ",".join(str(x) for x in stateMap[state])
+
+from datatypes.User import User
+
+class Api(Iface):
+    """
+    **pyLoad's API**
+
+    It is accessible internally via core.api, via the websocket backend, or via the json api.
+
+    See the Thrift specification file remote/thriftbackend/pyload.thrift\
+    for information about data structures and what methods are usable with rpc.
+
+    Most methods require specific permissions; please look at the source code if you need to know.\
+    These can be configured via the web interface.
+    Admin users have all permissions and are the only ones who can access methods with no specific permission.
+    """
+
+    EXTERNAL = Iface  # let the json api know which methods are external
+    EXTEND = False  # only extendable when set to true
+
+    def __init__(self, core):
+        self.core = core
+        self.user_apis = {}
+
+    @property
+    def user(self):
+        return None  # TODO return default user?
+
+    @property
+    def primaryUID(self):
+        return self.user.primary if self.user else None
+
+    @classmethod
+    def initComponents(cls):
+        # Allow extending the api
+        # This prevents unintentional registering of the components,
+        # but will only work once, when they are imported
+        cls.EXTEND = True
+        # Import all Api modules, they register themselves.
+        import pyload.api
+        # they will vanish from the namespace afterwards
+
+
+    @classmethod
+    def extend(cls, api):
+        """Takes all params from api and extends cls with it.
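+        (illustrative usage at the bottom of an api module: ``if Api.extend(AccountApi): del AccountApi``)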
+ api class can be removed afterwards + + :param api: Class with methods to extend + """ + if cls.EXTEND: + for name, func in api.__dict__.iteritems(): + if name.startswith("_"): continue + setattr(cls, name, MethodType(func, None, cls)) + + return cls.EXTEND + + def withUserContext(self, uid): + """ Returns a proxy version of the api, to call method in user context + + :param uid: user or userData instance or uid + :return: :class:`UserApi` + """ + if isinstance(uid, User): + uid = uid.uid + + if uid not in self.user_apis: + user = self.core.db.getUserData(uid=uid) + if not user: #TODO: anonymous user? + return None + + self.user_apis[uid] = UserApi(self.core, User.fromUserData(self, user)) + + return self.user_apis[uid] + + + ############################# + # Auth+User Information + ############################# + + # TODO + + @RequirePerm(Permission.All) + def login(self, username, password, remoteip=None): + """Login into pyLoad, this **must** be called when using rpc before any methods can be used. + + :param username: + :param password: + :param remoteip: Omit this argument, its only used internal + :return: bool indicating login was successful + """ + return True if self.checkAuth(username, password, remoteip) else False + + def checkAuth(self, username, password, remoteip=None): + """Check authentication and returns details + + :param username: + :param password: + :param remoteip: + :return: dict with info, empty when login is incorrect + """ + self.core.log.info(_("User '%s' tries to log in") % username) + + return self.core.db.checkAuth(username, password) + + def isAuthorized(self, func, user): + """checks if the user is authorized for specific method + + :param func: function name + :param user: `User` + :return: boolean + """ + if user.isAdmin(): + return True + elif func in perm_map and user.hasPermission(perm_map[func]): + return True + else: + return False + + # TODO + @RequirePerm(Permission.All) + def getUserData(self, username, password): + """similar to `checkAuth` but returns UserData thrift type """ + user = self.checkAuth(username, password) + if not user: + raise UserDoesNotExists(username) + + return user.toUserData() + + def getAllUserData(self): + """returns all known user and info""" + return self.core.db.getAllUserData() + + def changePassword(self, username, oldpw, newpw): + """ changes password for specific user """ + return self.core.db.changePassword(username, oldpw, newpw) + + def setUserPermission(self, user, permission, role): + self.core.db.setPermission(user, permission) + self.core.db.setRole(user, role) + + +class UserApi(Api): + """ Proxy object for api that provides all methods in user context """ + + def __init__(self, core, user): + # No need to init super class + self.core = core + self._user = user + + def withUserContext(self, uid): + raise Exception("Not allowed") + + @property + def user(self): + return self._user \ No newline at end of file diff --git a/pyload/FileManager.py b/pyload/FileManager.py new file mode 100644 index 000000000..b1d3891e9 --- /dev/null +++ b/pyload/FileManager.py @@ -0,0 +1,582 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +############################################################################### +# Copyright(c) 2008-2012 pyLoad Team +# http://www.pyload.org +# +# This file is part of pyLoad. 
+# pyLoad is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# Subjected to the terms and conditions in LICENSE +# +# @author: RaNaN +############################################################################### + +from time import time +from ReadWriteLock import ReadWriteLock + +from pyload.utils import lock, read_lock + +from Api import PackageStatus, DownloadStatus as DS, TreeCollection, PackageDoesNotExists +from datatypes.PyFile import PyFile +from datatypes.PyPackage import PyPackage, RootPackage + +# invalidates the cache +def invalidate(func): + def new(*args): + args[0].downloadstats = {} + args[0].queuestats = {} + args[0].jobCache = {} + return func(*args) + + return new + +# TODO: needs to be replaced later +OWNER = 0 + +class FileManager: + """Handles all request made to obtain information, + modify status or other request for links or packages""" + + ROOT_PACKAGE = -1 + + def __init__(self, core): + """Constructor""" + self.core = core + self.evm = core.eventManager + + # translations + self.statusMsg = [_("none"), _("offline"), _("online"), _("queued"), _("paused"), + _("finished"), _("skipped"), _("failed"), _("starting"), + _("waiting"), _("downloading"), _("temp. offline"), _("aborted"), + _("decrypting"), _("processing"), _("custom"), _("unknown")] + + self.files = {} # holds instances for files + self.packages = {} # same for packages + + self.jobCache = {} + + # locking the caches, db is already locked implicit + self.lock = ReadWriteLock() + #self.lock._Verbose__verbose = True + + self.downloadstats = {} # cached dl stats + self.queuestats = {} # cached queue stats + + self.db = self.core.db + + def save(self): + """saves all data to backend""" + self.db.commit() + + @read_lock + def syncSave(self): + """saves all data to backend and waits until all data are written""" + for pyfile in self.files.values(): + pyfile.sync() + + for pypack in self.packages.values(): + pypack.sync() + + self.db.syncSave() + + def cachedFiles(self): + return self.files.values() + + def cachedPackages(self): + return self.packages.values() + + def getCollector(self): + pass + + @invalidate + def addLinks(self, data, package): + """Add links, data = (plugin, url) tuple. 
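+        Dispatches a package:updated event afterwards so listeners can refresh.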
Internal method should use API.""" + self.db.addLinks(data, package, OWNER) + self.evm.dispatchEvent("package:updated", package) + + + @invalidate + def addPackage(self, name, folder, root, password, site, comment, paused): + """Adds a package to database""" + pid = self.db.addPackage(name, folder, root, password, site, comment, + PackageStatus.Paused if paused else PackageStatus.Ok, OWNER) + p = self.db.getPackageInfo(pid) + + self.evm.dispatchEvent("package:inserted", pid, p.root, p.packageorder) + return pid + + + @lock + def getPackage(self, pid): + """return package instance""" + if pid == self.ROOT_PACKAGE: + return RootPackage(self, OWNER) + elif pid in self.packages: + pack = self.packages[pid] + pack.timestamp = time() + return pack + else: + info = self.db.getPackageInfo(pid, False) + if not info: return None + + pack = PyPackage.fromInfoData(self, info) + self.packages[pid] = pack + + return pack + + @read_lock + def getPackageInfo(self, pid): + """returns dict with package information""" + if pid == self.ROOT_PACKAGE: + pack = RootPackage(self, OWNER).toInfoData() + elif pid in self.packages: + pack = self.packages[pid].toInfoData() + pack.stats = self.db.getStatsForPackage(pid) + else: + pack = self.db.getPackageInfo(pid) + + if not pack: return None + + # todo: what does this todo mean?! + #todo: fill child packs and files + packs = self.db.getAllPackages(root=pid) + if pid in packs: del packs[pid] + pack.pids = packs.keys() + + files = self.db.getAllFiles(package=pid) + pack.fids = files.keys() + + return pack + + @lock + def getFile(self, fid): + """returns pyfile instance""" + if fid in self.files: + return self.files[fid] + else: + info = self.db.getFileInfo(fid) + if not info: return None + + f = PyFile.fromInfoData(self, info) + self.files[fid] = f + return f + + @read_lock + def getFileInfo(self, fid): + """returns dict with file information""" + if fid in self.files: + return self.files[fid].toInfoData() + + return self.db.getFileInfo(fid) + + @read_lock + def getTree(self, pid, full, state, search=None): + """ return a TreeCollection and fill the info data of containing packages. + optional filter only unfnished files + """ + view = TreeCollection(pid) + + # for depth=1, we don't need to retrieve all files/packages + root = pid if not full else None + + packs = self.db.getAllPackages(root) + files = self.db.getAllFiles(package=root, state=state, search=search) + + # updating from cache + for fid, f in self.files.iteritems(): + if fid in files: + files[fid] = f.toInfoData() + + # foreign pid, don't overwrite local pid ! 
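+        # cached packages are newer than the db snapshot; take their data but keep the stats the db just computed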
+ for fpid, p in self.packages.iteritems(): + if fpid in packs: + # copy the stats data + stats = packs[fpid].stats + packs[fpid] = p.toInfoData() + packs[fpid].stats = stats + + # root package is not in database, create an instance + if pid == self.ROOT_PACKAGE: + view.root = RootPackage(self, OWNER).toInfoData() + packs[self.ROOT_PACKAGE] = view.root + elif pid in packs: + view.root = packs[pid] + else: # package does not exists + return view + + # linear traversal over all data + for fpid, p in packs.iteritems(): + if p.fids is None: p.fids = [] + if p.pids is None: p.pids = [] + + root = packs.get(p.root, None) + if root: + if root.pids is None: root.pids = [] + root.pids.append(fpid) + + for fid, f in files.iteritems(): + p = packs.get(f.package, None) + if p: p.fids.append(fid) + + + # cutting of tree is not good in runtime, only saves bandwidth + # need to remove some entries + if full and pid > -1: + keep = [] + queue = [pid] + while queue: + fpid = queue.pop() + keep.append(fpid) + queue.extend(packs[fpid].pids) + + # now remove unneeded data + for fpid in packs.keys(): + if fpid not in keep: + del packs[fpid] + + for fid, f in files.items(): + if f.package not in keep: + del files[fid] + + #remove root + del packs[pid] + view.files = files + view.packages = packs + + return view + + + @lock + def getJob(self, occ): + """get suitable job""" + + #TODO only accessed by one thread, should not need a lock + #TODO needs to be approved for new database + #TODO clean mess + #TODO improve selection of valid jobs + + if occ in self.jobCache: + if self.jobCache[occ]: + id = self.jobCache[occ].pop() + if id == "empty": + pyfile = None + self.jobCache[occ].append("empty") + else: + pyfile = self.getFile(id) + else: + jobs = self.db.getJob(occ) + jobs.reverse() + if not jobs: + self.jobCache[occ].append("empty") + pyfile = None + else: + self.jobCache[occ].extend(jobs) + pyfile = self.getFile(self.jobCache[occ].pop()) + + else: + self.jobCache = {} #better not caching to much + jobs = self.db.getJob(occ) + jobs.reverse() + self.jobCache[occ] = jobs + + if not jobs: + self.jobCache[occ].append("empty") + pyfile = None + else: + pyfile = self.getFile(self.jobCache[occ].pop()) + + + return pyfile + + def getDownloadStats(self, user=None): + """ return number of downloads """ + if user not in self.downloadstats: + self.downloadstats[user] = self.db.downloadstats(user) + + return self.downloadstats[user] + + def getQueueStats(self, user=None, force=False): + """number of files that have to be processed, failed files will not be included""" + if user not in self.queuestats or force: + self.queuestats[user] = self.db.queuestats(user) + + return self.queuestats[user] + + def scanDownloadFolder(self): + pass + + @lock + @invalidate + def deletePackage(self, pid): + """delete package and all contained links""" + + p = self.getPackage(pid) + if not p: return + + oldorder = p.packageorder + root = p.root + + for pyfile in self.cachedFiles(): + if pyfile.packageid == pid: + pyfile.abortDownload() + + # TODO: delete child packages + # TODO: delete folder + + self.db.deletePackage(pid) + self.releasePackage(pid) + + for pack in self.cachedPackages(): + if pack.root == root and pack.packageorder > oldorder: + pack.packageorder -= 1 + + self.evm.dispatchEvent("package:deleted", pid) + + @lock + @invalidate + def deleteFile(self, fid): + """deletes links""" + + f = self.getFile(fid) + if not f: return + + pid = f.packageid + order = f.fileorder + + if fid in self.core.threadManager.processingIds(): + 
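+            # the file is currently downloading; abort the transfer before removing it from the database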
f.abortDownload()
+
+        # TODO: delete real file
+
+        self.db.deleteFile(fid, f.fileorder, f.packageid)
+        self.releaseFile(fid)
+
+        for pyfile in self.files.itervalues():
+            if pyfile.packageid == pid and pyfile.fileorder > order:
+                pyfile.fileorder -= 1
+
+        self.evm.dispatchEvent("file:deleted", fid, pid)
+
+    @lock
+    def releaseFile(self, fid):
+        """removes pyfile from cache"""
+        if fid in self.files:
+            del self.files[fid]
+
+    @lock
+    def releasePackage(self, pid):
+        """removes package from cache"""
+        if pid in self.packages:
+            del self.packages[pid]
+
+    def updateFile(self, pyfile):
+        """updates file"""
+        self.db.updateFile(pyfile)
+
+        # This event is thrown with pyfile or only fid
+        self.evm.dispatchEvent("file:updated", pyfile)
+
+    def updatePackage(self, pypack):
+        """updates a package"""
+        self.db.updatePackage(pypack)
+        self.evm.dispatchEvent("package:updated", pypack.pid)
+
+    @invalidate
+    def updateFileInfo(self, data, pid):
+        """ updates file info (name, size, status,[ hash,] url)"""
+        self.db.updateLinkInfo(data)
+        self.evm.dispatchEvent("package:updated", pid)
+
+    def checkAllLinksFinished(self):
+        """checks if all files are finished and dispatches the event"""
+
+        # TODO: user context?
+        if not self.db.queuestats()[0]:
+            self.core.addonManager.dispatchEvent("download:allFinished")
+            self.core.log.debug("All downloads finished")
+            return True
+
+        return False
+
+    def checkAllLinksProcessed(self, fid=-1):
+        """checks if all files were processed and pyload would be idle now; needs fid, which will be ignored when counting"""
+
+        # reset count so statistics will update (this is called when a dl was processed)
+        self.resetCount()
+
+        # TODO: user context?
+        if not self.db.processcount(fid):
+            self.core.addonManager.dispatchEvent("download:allProcessed")
+            self.core.log.debug("All downloads processed")
+            return True
+
+        return False
+
+    def checkPackageFinished(self, pyfile):
+        """ checks if package is finished and calls addonmanager """
+
+        ids = self.db.getUnfinished(pyfile.packageid)
+        if not ids or (pyfile.id in ids and len(ids) == 1):
+            if not pyfile.package().setFinished:
+                self.core.log.info(_("Package finished: %s") % pyfile.package().name)
+                self.core.addonManager.packageFinished(pyfile.package())
+                pyfile.package().setFinished = True
+
+    def resetCount(self):
+        self.queuecount = -1
+
+    @read_lock
+    @invalidate
+    def restartPackage(self, pid):
+        """restart package"""
+        for pyfile in self.cachedFiles():
+            if pyfile.packageid == pid:
+                self.restartFile(pyfile.id)
+
+        self.db.restartPackage(pid)
+
+        if pid in self.packages:
+            self.packages[pid].setFinished = False
+
+        self.evm.dispatchEvent("package:updated", pid)
+
+    @read_lock
+    @invalidate
+    def restartFile(self, fid):
+        """ restart file"""
+        if fid in self.files:
+            f = self.files[fid]
+            f.status = DS.Queued
+            f.name = f.url
+            f.error = ""
+            f.abortDownload()
+
+        self.db.restartFile(fid)
+        self.evm.dispatchEvent("file:updated", fid)
+
+
+    @lock
+    @invalidate
+    def orderPackage(self, pid, position):
+
+        p = self.getPackageInfo(pid)
+        self.db.orderPackage(pid, p.root, p.packageorder, position)
+
+        for pack in self.packages.itervalues():
+            if pack.root != p.root or pack.packageorder < 0: continue
+            if pack.pid == pid:
+                pack.packageorder = position
+            if p.packageorder > position:
+                if position <= pack.packageorder < p.packageorder:
+                    pack.packageorder += 1
+            elif p.packageorder < position:
+                if position >= pack.packageorder > p.packageorder:
+                    pack.packageorder -= 1
+
+        self.db.commit()
+
+        self.evm.dispatchEvent("package:reordered", pid, position, 
p.root) + + @lock + @invalidate + def orderFiles(self, fids, pid, position): + + files = [self.getFileInfo(fid) for fid in fids] + orders = [f.fileorder for f in files] + if min(orders) + len(files) != max(orders) + 1: + raise Exception("Tried to reorder non continous block of files") + + # minimum fileorder + f = reduce(lambda x,y: x if x.fileorder < y.fileorder else y, files) + order = f.fileorder + + self.db.orderFiles(pid, fids, order, position) + diff = len(fids) + + if f.fileorder > position: + for pyfile in self.files.itervalues(): + if pyfile.packageid != f.package or pyfile.fileorder < 0: continue + if position <= pyfile.fileorder < f.fileorder: + pyfile.fileorder += diff + + for i, fid in enumerate(fids): + if fid in self.files: + self.files[fid].fileorder = position + i + + elif f.fileorder < position: + for pyfile in self.files.itervalues(): + if pyfile.packageid != f.package or pyfile.fileorder < 0: continue + if position >= pyfile.fileorder >= f.fileorder+diff: + pyfile.fileorder -= diff + + for i, fid in enumerate(fids): + if fid in self.files: + self.files[fid].fileorder = position -diff + i + 1 + + self.db.commit() + + self.evm.dispatchEvent("file:reordered", pid) + + @read_lock + @invalidate + def movePackage(self, pid, root): + """ move pid - root """ + + p = self.getPackageInfo(pid) + dest = self.getPackageInfo(root) + if not p: raise PackageDoesNotExists(pid) + if not dest: raise PackageDoesNotExists(root) + + # cantor won't be happy if we put the package in itself + if pid == root or p.root == root: return False + + # TODO move real folders + + # we assume pack is not in use anyway, so we can release it + self.releasePackage(pid) + self.db.movePackage(p.root, p.packageorder, pid, root) + + return True + + @read_lock + @invalidate + def moveFiles(self, fids, pid): + """ move all fids to pid """ + + f = self.getFileInfo(fids[0]) + if not f or f.package == pid: + return False + if not self.getPackageInfo(pid): + raise PackageDoesNotExists(pid) + + # TODO move real files + + self.db.moveFiles(f.package, fids, pid) + + return True + + + @invalidate + def reCheckPackage(self, pid): + """ recheck links in package """ + data = self.db.getPackageData(pid) + + urls = [] + + for pyfile in data.itervalues(): + if pyfile.status not in (DS.NA, DS.Finished, DS.Skipped): + urls.append((pyfile.url, pyfile.pluginname)) + + self.core.threadManager.createInfoThread(urls, pid) + + + @invalidate + def restartFailed(self): + """ restart all failed links """ + # failed should not be in cache anymore, so working on db is sufficient + self.db.restartFailed() diff --git a/pyload/InitHomeDir.py b/pyload/InitHomeDir.py new file mode 100644 index 000000000..8fca42196 --- /dev/null +++ b/pyload/InitHomeDir.py @@ -0,0 +1,87 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +""" + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3 of the License, + or (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, see . 
+ + @author: RaNaN + + This modules inits working directories and global variables, pydir and homedir +""" + +from os import makedirs, path, chdir +from os.path import join +import sys +from sys import argv, platform + +import __builtin__ + +__builtin__.owd = path.abspath("") #original working directory +__builtin__.pypath = path.abspath(path.join(__file__, "..", "..")) + +sys.path.append(join(pypath, "pyload", "lib")) + +homedir = "" + +if platform == 'nt': + homedir = path.expanduser("~") + if homedir == "~": + import ctypes + + CSIDL_APPDATA = 26 + _SHGetFolderPath = ctypes.windll.shell32.SHGetFolderPathW + _SHGetFolderPath.argtypes = [ctypes.wintypes.HWND, + ctypes.c_int, + ctypes.wintypes.HANDLE, + ctypes.wintypes.DWORD, ctypes.wintypes.LPCWSTR] + + path_buf = ctypes.wintypes.create_unicode_buffer(ctypes.wintypes.MAX_PATH) + result = _SHGetFolderPath(0, CSIDL_APPDATA, 0, 0, path_buf) + homedir = path_buf.value +else: + homedir = path.expanduser("~") + +__builtin__.homedir = homedir + +configdir = None +args = " ".join(argv) +# dirty method to set configdir from commandline arguments +if "--configdir=" in args: + for arg in argv: + if arg.startswith("--configdir="): + configdir = arg.replace('--configdir=', '').strip() + +elif "nosetests" in args: + print "Running in test mode" + configdir = join(pypath, "tests", "config") + +elif path.exists(path.join(pypath, "pyload", "config", "configdir")): + f = open(path.join(pypath, "pyload", "config", "configdir"), "rb") + c = f.read().strip() + f.close() + configdir = path.join(pypath, c) + +# default config dir +if not configdir: + if platform in ("posix", "linux2", "darwin"): + configdir = path.join(homedir, ".pyload") + else: + configdir = path.join(homedir, "pyload") + +if not path.exists(configdir): + makedirs(configdir, 0700) + +__builtin__.configdir = configdir +chdir(configdir) + +#print "Using %s as working directory." % configdir diff --git a/pyload/PluginManager.py b/pyload/PluginManager.py new file mode 100644 index 000000000..159e7d9de --- /dev/null +++ b/pyload/PluginManager.py @@ -0,0 +1,426 @@ +# -*- coding: utf-8 -*- + +############################################################################### +# Copyright(c) 2008-2013 pyLoad Team +# http://www.pyload.org +# +# This file is part of pyLoad. +# pyLoad is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# Subjected to the terms and conditions in LICENSE +# +# @author: RaNaN, mkaay +############################################################################### + +import re +import sys + +from os import listdir, makedirs +from os.path import isfile, join, exists, abspath, basename +from sys import version_info +from time import time + +from pyload.lib.SafeEval import const_eval as literal_eval +from pyload.plugins.Base import Base + +from new_collections import namedtuple + +#TODO: ignores not updatable + +# ignore these plugin configs, mainly because plugins were wiped out +IGNORE = ( + "FreakshareNet", "SpeedManager", "ArchiveTo", "ShareCx", ('addons', 'UnRar'), + 'EasyShareCom', 'FlyshareCz' + ) + +PluginTuple = namedtuple("PluginTuple", "version re deps category user path") + +class PluginManager: + ROOT = "pyload.plugins." + LOCALROOT = "localplugins." 
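+    # import prefixes for the importer hooks below: plugins shipped with pyload resolve under ROOT,
+    # user overrides under LOCALROOT (e.g. pyload.plugins.hoster.Xyz vs. localplugins.hoster.Xyz; Xyz is illustrative)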
+ TYPES = ("crypter", "hoster", "accounts", "addons", "internal") + + BUILTIN = re.compile(r'__(?P[a-z0-9_]+)__\s*=\s?(True|False|None|[0-9x.]+)', re.I) + SINGLE = re.compile(r'__(?P[a-z0-9_]+)__\s*=\s*(?:r|u|_)?((?:(?[a-z0-9_]+)__\s*=\s*((?:\{|\[|"{3}).*?(?:"""|\}|\]))', re.DOTALL | re.M | re.I) + + NO_MATCH = re.compile(r'^no_match$') + + def __init__(self, core): + self.core = core + + #self.config = self.core.config + self.log = core.log + + self.plugins = {} + self.modules = {} # cached modules + self.history = [] # match history to speedup parsing (type, name) + self.user_context = {} # plugins working with user context + self.createIndex() + + #register for import addon + sys.meta_path.append(self) + + def logDebug(self, type, plugin, msg): + self.log.debug("Plugin %s | %s: %s" % (type, plugin, msg)) + + def createIndex(self): + """create information for all plugins available""" + # add to path, so we can import from userplugins + sys.path.append(abspath("")) + + if not exists("userplugins"): + makedirs("userplugins") + if not exists(join("userplugins", "__init__.py")): + f = open(join("userplugins", "__init__.py"), "wb") + f.close() + + a = time() + for type in self.TYPES: + self.plugins[type] = self.parse(type) + + self.log.debug("Created index of plugins in %.2f ms", (time() - a) * 1000) + + def parse(self, folder, home=None): + """ Analyze and parses all plugins in folder """ + plugins = {} + if home: + pfolder = join("userplugins", folder) + if not exists(pfolder): + makedirs(pfolder) + if not exists(join(pfolder, "__init__.py")): + f = open(join(pfolder, "__init__.py"), "wb") + f.close() + + else: + pfolder = join(pypath, "pyload", "plugins", folder) + + for f in listdir(pfolder): + if (isfile(join(pfolder, f)) and f.endswith(".py") or f.endswith("_25.pyc") or f.endswith( + "_26.pyc") or f.endswith("_27.pyc")) and not f.startswith("_"): + if f.endswith("_25.pyc") and version_info[0:2] != (2, 5): + continue + elif f.endswith("_26.pyc") and version_info[0:2] != (2, 6): + continue + elif f.endswith("_27.pyc") and version_info[0:2] != (2, 7): + continue + + # replace suffix and version tag + name = f[:-3] + if name[-1] == ".": name = name[:-4] + + plugin = self.parsePlugin(join(pfolder, f), folder, name, home) + if plugin: + plugins[name] = plugin + + if not home: + temp = self.parse(folder, plugins) + plugins.update(temp) + + return plugins + + def parseAttributes(self, filename, name, folder=""): + """ Parse attribute dict from plugin""" + data = open(filename, "rb") + content = data.read() + data.close() + + attrs = {} + for m in self.BUILTIN.findall(content) + self.SINGLE.findall(content) + self.MULTI.findall(content): + #replace gettext function and eval result + try: + attrs[m[0]] = literal_eval(m[-1].replace("_(", "(")) + except: + self.logDebug(folder, name, "Error when parsing: %s" % m[-1]) + self.core.print_exc() + + if not hasattr(Base, "__%s__" % m[0]): + if m[0] != "type": #TODO remove type from all plugins, its not needed + self.logDebug(folder, name, "Unknown attribute '%s'" % m[0]) + + return attrs + + def parsePlugin(self, filename, folder, name, home=None): + """ Parses a plugin from disk, folder means plugin type in this context. Also sets config. 
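+        (illustrative call: parsePlugin("userplugins/hoster/Xyz.py", "hoster", "Xyz", home))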
+ + :arg home: dict with plugins, of which the found one will be matched against (according version) + :returns PluginTuple""" + + attrs = self.parseAttributes(filename, name, folder) + if not attrs: return + + version = 0 + + if "version" in attrs: + try: + version = float(attrs["version"]) + except ValueError: + self.logDebug(folder, name, "Invalid version %s" % attrs["version"]) + version = 9 #TODO remove when plugins are fixed, causing update loops + else: + self.logDebug(folder, name, "No version attribute") + + # home contains plugins from pyload root + if home and name in home: + if home[name].version >= version: + return + + if name in IGNORE or (folder, name) in IGNORE: + return + + if "pattern" in attrs and attrs["pattern"]: + try: + plugin_re = re.compile(attrs["pattern"], re.I) + except: + self.logDebug(folder, name, "Invalid regexp pattern '%s'" % attrs["pattern"]) + plugin_re = self.NO_MATCH + else: plugin_re = self.NO_MATCH + + deps = attrs.get("dependencies", None) + category = attrs.get("category", None) if folder == "addons" else None + + # create plugin tuple + plugin = PluginTuple(version, plugin_re, deps, category, bool(home), filename) + + # internals have no config + if folder == "internal": + return plugin + + if folder == "addons" and "config" not in attrs and not attrs.get("internal", False): + attrs["config"] = (["activated", "bool", "Activated", False],) + + if "config" in attrs and attrs["config"]: + config = attrs["config"] + desc = attrs.get("description", "") + long_desc = attrs.get("long_description", "") + + if type(config[0]) == tuple: + config = [list(x) for x in config] + else: + config = [list(config)] + + if folder == "addons" and not attrs.get("internal", False): + for item in config: + if item[0] == "activated": break + else: # activated flag missing + config.insert(0, ("activated", "bool", "Activated", False)) + + # Everything that is no addon and user_context=True, is added to dict + if folder != "addons" or attrs.get("user_context", False): + self.user_context[name] = True + + try: + self.core.config.addConfigSection(name, name, desc, long_desc, config) + except: + self.logDebug(folder, name, "Invalid config %s" % config) + + return plugin + + + def parseUrls(self, urls): + """parse plugins for given list of urls, separate to crypter and hoster""" + + res = {"hoster": [], "crypter": []} # tupels of (url, plugin) + + for url in urls: + if type(url) not in (str, unicode, buffer): + self.log.debug("Parsing invalid type %s" % type(url)) + continue + + found = False + + for ptype, name in self.history: + if self.plugins[ptype][name].re.match(url): + res[ptype].append((url, name)) + found = (ptype, name) + break # need to exit this loop first + + if found: # found match + if self.history[0] != found: #update history + self.history.remove(found) + self.history.insert(0, found) + continue + + for ptype in ("crypter", "hoster"): + for name, plugin in self.plugins[ptype].iteritems(): + if plugin.re.match(url): + res[ptype].append((url, name)) + self.history.insert(0, (ptype, name)) + del self.history[10:] # cut down to size of 10 + found = True + break + + if not found: + res["hoster"].append((url, "BasePlugin")) + + return res["hoster"], res["crypter"] + + def getPlugins(self, type): + return self.plugins.get(type, None) + + def findPlugin(self, name, pluginlist=("hoster", "crypter")): + for ptype in pluginlist: + if name in self.plugins[ptype]: + return ptype, self.plugins[ptype][name] + return None, None + + def getPluginModule(self, name): + """ 
Deprecated: return plugin module from hoster|crypter"""
+        self.log.debug("Deprecated method: .getPluginModule()")
+        type, plugin = self.findPlugin(name)
+        return self.loadModule(type, name)
+
+    def getPluginClass(self, name):
+        """ return plugin class from hoster|crypter, always the one that is not overwritten """
+        type, plugin = self.findPlugin(name)
+        return self.loadClass(type, name)
+
+    # MultiHoster will overwrite this
+    getPlugin = getPluginClass
+
+    def loadAttributes(self, type, name):
+        plugin = self.plugins[type][name]
+        return self.parseAttributes(plugin.path, name, type)
+
+    def loadModule(self, type, name):
+        """ Returns loaded module for plugin
+
+        :param type: plugin type, subfolder of module.plugins
+        :param name:
+        """
+        plugins = self.plugins[type]
+        if name in plugins:
+            if (type, name) in self.modules: return self.modules[(type, name)]
+            try:
+                # convert path to a python-recognizable import
+                path = basename(plugins[name].path).replace(".pyc", "").replace(".py", "")
+                module = __import__(self.ROOT + "%s.%s" % (type, path), globals(), locals(), path)
+                self.modules[(type, name)] = module  # cache import, maybe unneeded
+                return module
+            except Exception, e:
+                self.log.error(_("Error importing %(name)s: %(msg)s") % {"name": name, "msg": str(e)})
+                self.core.print_exc()
+
+    def loadClass(self, type, name):
+        """Returns the class of a plugin with the same name"""
+        module = self.loadModule(type, name)
+        if module: return getattr(module, name)
+
+    def find_module(self, fullname, path=None):
+        # redirecting imports if necessary
+        if fullname.startswith(self.ROOT) or fullname.startswith(self.LOCALROOT):  # separate pyload plugins
+            if fullname.startswith(self.LOCALROOT): user = 1
+            else: user = 0  # used as bool and int
+
+            split = fullname.split(".")
+            if len(split) != 4 - user: return
+            type, name = split[2 - user:4 - user]
+
+            if type in self.plugins and name in self.plugins[type]:
+                # userplugin is a newer version
+                if not user and self.plugins[type][name].user:
+                    return self
+                # imported from userdir, but pyload's is newer
+                if user and not self.plugins[type][name].user:
+                    return self
+
+        # TODO: Remove when all plugin imports are adapted
+        if "module" in fullname:
+            return self
+
+
+    def load_module(self, name, replace=True):
+        if name not in sys.modules:  # could already be in sys.modules
+
+            # TODO: only temporary
+            if name.endswith("module"):
+                # name = "pyload."
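+                # compatibility shim: old "module..." import paths are answered with the matching "pyload..." packages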
+ name = name.replace(".module", "") + self.log.debug("Old import reference detected, use %s" % name) + replace = False + return __import__("pyload") + if name.startswith("module"): + name = name.replace("module", "pyload") + self.log.debug("Old import reference detected, use %s" % name) + replace = False + + + if replace: + if self.ROOT in name: + newname = name.replace(self.ROOT, self.LOCALROOT) + else: + newname = name.replace(self.LOCALROOT, self.ROOT) + else: newname = name + + base, plugin = newname.rsplit(".", 1) + + self.log.debug("Redirected import %s -> %s" % (name, newname)) + + module = __import__(newname, globals(), locals(), [plugin]) + #inject under new an old name + sys.modules[name] = module + sys.modules[newname] = module + + return sys.modules[name] + + def reloadPlugins(self, type_plugins): + """ reloads and reindexes plugins """ + if not type_plugins: return False + + self.log.debug("Request reload of plugins: %s" % type_plugins) + + as_dict = {} + for t, n in type_plugins: + if t in as_dict: + as_dict[t].append(n) + else: + as_dict[t] = [n] + + # we do not reload addons or internals, would cause to much side effects + if "addons" in as_dict or "internal" in as_dict: + return False + + for type in as_dict.iterkeys(): + for plugin in as_dict[type]: + if plugin in self.plugins[type]: + if (type, plugin) in self.modules: + self.log.debug("Reloading %s" % plugin) + reload(self.modules[(type, plugin)]) + + # index re-creation + for type in ("crypter", "container", "hoster", "captcha", "accounts"): + self.plugins[type] = self.parse(type) + + if "accounts" in as_dict: #accounts needs to be reloaded + self.core.accountManager.initPlugins() + self.core.scheduler.addJob(0, self.core.accountManager.getAccountInfos) + + return True + + def isUserPlugin(self, plugin): + """ A plugin suitable for multiple user """ + return plugin in self.user_context + + def isPluginType(self, plugin, type): + return plugin in self.plugins[type] + + def getCategory(self, plugin): + if plugin in self.plugins["addons"]: + return self.plugins["addons"][plugin].category or "addon" + + def loadIcon(self, name): + """ load icon for single plugin, base64 encoded""" + pass + + def checkDependencies(self, type, name): + """ Check deps for given plugin + + :return: List of unfullfilled dependencies + """ + pass + diff --git a/pyload/Scheduler.py b/pyload/Scheduler.py new file mode 100644 index 000000000..0bc396b69 --- /dev/null +++ b/pyload/Scheduler.py @@ -0,0 +1,141 @@ +# -*- coding: utf-8 -*- + +""" + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3 of the License, + or (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, see . 
+
+    @author: mkaay
+"""
+
+from time import time
+from heapq import heappop, heappush
+from thread import start_new_thread
+from threading import Lock
+
+class AlreadyCalled(Exception):
+    pass
+
+
+class Deferred():
+    def __init__(self):
+        self.call = []
+        self.result = ()
+
+    def addCallback(self, f, *cargs, **ckwargs):
+        self.call.append((f, cargs, ckwargs))
+
+    def callback(self, *args, **kwargs):
+        if self.result:
+            raise AlreadyCalled
+        self.result = (args, kwargs)
+        for f, cargs, ckwargs in self.call:
+            args += tuple(cargs)
+            kwargs.update(ckwargs)
+            f(*args, **kwargs)
+
+
+class Scheduler():
+    def __init__(self, core):
+        self.core = core
+
+        self.queue = PriorityQueue()
+
+    def addJob(self, t, call, args=[], kwargs={}, threaded=True):
+        d = Deferred()
+        t += time()
+        j = Job(t, call, args, kwargs, d, threaded)
+        self.queue.put((t, j))
+        return d
+
+
+    def removeJob(self, d):
+        """
+        :param d: deferred object
+        :return: if job was deleted
+        """
+        index = -1
+
+        for i, j in enumerate(self.queue):
+            if j[1].deferred == d:
+                index = i
+
+        if index >= 0:
+            del self.queue[index]
+            return True
+
+        return False
+
+    def work(self):
+        while True:
+            t, j = self.queue.get()
+            if not j:
+                break
+            else:
+                if t <= time():
+                    j.start()
+                else:
+                    self.queue.put((t, j))
+                    break
+
+
+class Job():
+    def __init__(self, time, call, args=[], kwargs={}, deferred=None, threaded=True):
+        self.time = float(time)
+        self.call = call
+        self.args = args
+        self.kwargs = kwargs
+        self.deferred = deferred
+        self.threaded = threaded
+
+    def run(self):
+        ret = self.call(*self.args, **self.kwargs)
+        if self.deferred is None:
+            return
+        else:
+            self.deferred.callback(ret)
+
+    def start(self):
+        if self.threaded:
+            start_new_thread(self.run, ())
+        else:
+            self.run()
+
+
+class PriorityQueue():
+    """ a non-blocking priority queue """
+
+    def __init__(self):
+        self.queue = []
+        self.lock = Lock()
+
+    def __iter__(self):
+        return iter(self.queue)
+
+    def __delitem__(self, key):
+        del self.queue[key]
+
+    def put(self, element):
+        self.lock.acquire()
+        heappush(self.queue, element)
+        self.lock.release()
+
+    def get(self):
+        """ return element or None """
+        self.lock.acquire()
+        try:
+            el = heappop(self.queue)
+            return el
+        except IndexError:
+            return None, None
+        finally:
+            self.lock.release()
\ No newline at end of file
diff --git a/pyload/Setup.py b/pyload/Setup.py
new file mode 100644
index 000000000..2ecd6ebe6
--- /dev/null
+++ b/pyload/Setup.py
@@ -0,0 +1,418 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+###############################################################################
+# Copyright(c) 2008-2012 pyLoad Team
+# http://www.pyload.org
+#
+# This file is part of pyLoad.
+# pyLoad is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# Subjected to the terms and conditions in LICENSE
+#
+# @author: RaNaN
+###############################################################################
+
+import pyload.utils.pylgettext as gettext
+import os
+import sys
+import socket
+import webbrowser
+
+from getpass import getpass
+from time import time
+from sys import exit
+
+from pyload.utils.fs import abspath, dirname, exists, join, makedirs
+from pyload.utils import get_console_encoding
+from pyload.web.ServerThread import WebServer
+
+
+class Setup():
+    """
+    pyLoad's initial setup/configuration assistant
+    """
+
+    def __init__(self, path, config):
+        self.path = path
+        self.config = config
+        self.stdin_encoding = get_console_encoding(sys.stdin.encoding)
+        self.lang = None
+        # timestamp, so the setup has to be completed within a specific interval
+        self.timestamp = time()
+
+        # TODO: probably unneeded
+        self.yes = "yes"
+        self.no = "no"
+
+
+    def start(self):
+        web = WebServer(pysetup=self)
+        web.start()
+
+        error = web.check_error()
+        if error:  # todo errno 44 port already in use
+            print error
+
+        url = "http://%s:%d/" % (socket.gethostbyname(socket.gethostname()), web.port)
+
+        print "Setup started"
+
+        opened = webbrowser.open_new_tab(url)
+        if not opened:
+            print "Please point your browser to %s" % url
+
+
+        self.ask_lang()
+
+        print ""
+        print _("Would you like to configure pyLoad via Webinterface?")
+        print _("You need a browser and a connection to this PC.")
+        print _("The URL would be: http://hostname:8000/")
+        viaweb = self.ask(_("Start initial webinterface for configuration?"), self.yes, bool=True)
+        if viaweb:
+            self.start_web()
+        else:
+            self.start_cli()
+
+
+
+    def start_cli(self):
+
+
+        print _("Welcome to the pyLoad Configuration Assistant.")
+        print _("It will check your system and make a basic setup in order to run pyLoad.")
+        print ""
+        print _("The value in brackets [] is always the default value,")
+        print _("in case you don't want to change it or you are unsure what to choose, just hit enter.")
+        print _(
+            "Don't forget: You can always rerun this assistant with the --setup or -s parameter when you start pyLoadCore.")
+        print _("If you have any problems with this assistant, hit CTRL-C,")
+        print _("to abort and keep it from starting automatically with pyLoadCore.")
+        print ""
+        print _("When you are ready for the system check, hit enter.")
+        raw_input()
+
+        #self.get_page_next()
+
+
+        if len(avail) < 5:
+            print _("Features missing: ")
+            print
+
+            if not self.check_module("Crypto"):
+                print _("no py-crypto available")
+                print _("You need this if you want to decrypt container files.")
+                print ""
+
+            if not ssl:
+                print _("no SSL available")
+                print _("This is needed if you want to establish a secure connection to core or webinterface.")
+                print _("If you only access pyLoad locally, SSL is not useful.")
+                print ""
+
+            if not captcha:
+                print _("no Captcha Recognition available")
+                print _("Only needed for some hosters and for free users.")
+                print ""
+
+            if not js:
+                print _("no JavaScript engine found")
+                print _("You will need this for some Click'N'Load links. Install Spidermonkey, ossp-js, pyv8 or rhino")
+
+        print _("You can abort the setup now and fix some dependencies if you want.")
+
+        con = self.ask(_("Continue with setup?"), self.yes, bool=True)
+
+        if not con:
+            return False
+
+        print ""
+        print _("Do you want to change the config path? 
Current is %s") % abspath("") + print _( + "If you use pyLoad on a server or the home partition lives on an internal flash it may be a good idea to change it.") + path = self.ask(_("Change config path?"), self.no, bool=True) + if path: + self.conf_path() + #calls exit when changed + + print "" + print _("Do you want to configure login data and basic settings?") + print _("This is recommend for first run.") + con = self.ask(_("Make basic setup?"), self.yes, bool=True) + + if con: + self.conf_basic() + + if ssl: + print "" + print _("Do you want to configure ssl?") + ssl = self.ask(_("Configure ssl?"), self.no, bool=True) + if ssl: + self.conf_ssl() + + if web: + print "" + print _("Do you want to configure webinterface?") + web = self.ask(_("Configure webinterface?"), self.yes, bool=True) + if web: + self.conf_web() + + print "" + print _("Setup finished successfully.") + print _("Hit enter to exit and restart pyLoad") + raw_input() + return True + + + def start_web(self): + print "" + print _("Webinterface running for setup.") + # TODO start browser? + try: + from pyload.web import ServerThread + ServerThread.setup = self + from pyload.web import webinterface + webinterface.run_simple() + self.web = True + return True + except Exception, e: + print "Webinterface failed with this error: ", e + print "Falling back to commandline setup." + self.start_cli() + + + def conf_basic(self): + print "" + print _("## Basic Setup ##") + + print "" + print _("The following logindata is valid for CLI, GUI and webinterface.") + + from pyload.database import DatabaseBackend + + db = DatabaseBackend(None) + db.setup() + username = self.ask(_("Username"), "User") + password = self.ask("", "", password=True) + db.addUser(username, password) + db.shutdown() + + print "" + print _("External clients (GUI, CLI or other) need remote access to work over the network.") + print _("However, if you only want to use the webinterface you may disable it to save ram.") + self.config["remote"]["activated"] = self.ask(_("Enable remote access"), self.yes, bool=True) + + print "" + langs = self.config.getMetaData("general", "language") + self.config["general"]["language"] = self.ask(_("Language"), "en", langs.type.split(";")) + + self.config["general"]["download_folder"] = self.ask(_("Downloadfolder"), "Downloads") + self.config["download"]["max_downloads"] = self.ask(_("Max parallel downloads"), "3") + #print _("You should disable checksum proofing, if you have low hardware requirements.") + #self.config["general"]["checksum"] = self.ask(_("Proof checksum?"), "y", bool=True) + + reconnect = self.ask(_("Use Reconnect?"), self.no, bool=True) + self.config["reconnect"]["activated"] = reconnect + if reconnect: + self.config["reconnect"]["method"] = self.ask(_("Reconnect script location"), "./reconnect.sh") + + + def conf_web(self): + print "" + print _("## Webinterface Setup ##") + + print "" + self.config["webinterface"]["activated"] = self.ask(_("Activate webinterface?"), self.yes, bool=True) + print "" + print _("Listen address, if you use 127.0.0.1 or localhost, the webinterface will only accessible locally.") + self.config["webinterface"]["host"] = self.ask(_("Address"), "0.0.0.0") + self.config["webinterface"]["port"] = self.ask(_("Port"), "8000") + print "" + print _("pyLoad offers several server backends, now following a short explanation.") + print "threaded:", _("Default server, this server offers SSL and is a good alternative to builtin.") + print "fastcgi:", _( + "Can be used by apache, lighttpd, requires you to 
configure them, which is not too easy job.") + print "lightweight:", _("Very fast alternative written in C, requires libev and linux knowledge.") + print "\t", _("Get it from here: https://github.com/jonashaag/bjoern, compile it") + print "\t", _("and copy bjoern.so to pyload/lib") + + print + print _( + "Attention: In some rare cases the builtin server is not working, if you notice problems with the webinterface") + print _("come back here and change the builtin server to the threaded one here.") + + self.config["webinterface"]["server"] = self.ask(_("Server"), "threaded", + ["builtin", "threaded", "fastcgi", "lightweight"]) + + def conf_ssl(self): + print "" + print _("## SSL Setup ##") + print "" + print _("Execute these commands from pyLoad config folder to make ssl certificates:") + print "" + print "openssl genrsa -out ssl.key 1024" + print "openssl req -new -key ssl.key -out ssl.csr" + print "openssl req -days 36500 -x509 -key ssl.key -in ssl.csr > ssl.crt " + print "" + print _("If you're done and everything went fine, you can activate ssl now.") + self.config["ssl"]["activated"] = self.ask(_("Activate SSL?"), self.yes, bool=True) + + def set_user(self): + gettext.setpaths([join(os.sep, "usr", "share", "pyload", "locale"), None]) + translation = gettext.translation("setup", join(self.path, "locale"), + languages=[self.config["general"]["language"], "en"], fallback=True) + translation.install(True) + + from pyload.database import DatabaseBackend + + db = DatabaseBackend(None) + db.setup() + + noaction = True + try: + while True: + print _("Select action") + print _("1 - Create/Edit user") + print _("2 - List users") + print _("3 - Remove user") + print _("4 - Quit") + action = raw_input("[1]/2/3/4: ") + if not action in ("1", "2", "3", "4"): + continue + elif action == "1": + print "" + username = self.ask(_("Username"), "User") + password = self.ask("", "", password=True) + db.addUser(username, password) + noaction = False + elif action == "2": + print "" + print _("Users") + print "-----" + users = db.getAllUserData() + noaction = False + for user in users.itervalues(): + print user.name + print "-----" + print "" + elif action == "3": + print "" + username = self.ask(_("Username"), "") + if username: + db.removeUser(username) + noaction = False + elif action == "4": + db.syncSave() + break + finally: + if not noaction: + db.shutdown() + + def conf_path(self, trans=False): + if trans: + gettext.setpaths([join(os.sep, "usr", "share", "pyload", "locale"), None]) + translation = gettext.translation("setup", join(self.path, "locale"), + languages=[self.config["general"]["language"], "en"], fallback=True) + translation.install(True) + + print _("Setting new configpath, current configuration will not be transferred!") + path = self.ask(_("Configpath"), abspath("")) + try: + path = join(pypath, path) + if not exists(path): + makedirs(path) + f = open(join(pypath, "pyload", "config", "configdir"), "wb") + f.write(path) + f.close() + print _("Configpath changed, setup will now close, please restart to go on.") + print _("Press Enter to exit.") + raw_input() + exit() + except Exception, e: + print _("Setting config path failed: %s") % str(e) + + + def ask_lang(self): + langs = self.config.getMetaData("general", "language").type.split(";") + self.lang = self.ask(u"Choose your Language / Wähle deine Sprache", "en", langs) + gettext.setpaths([join(os.sep, "usr", "share", "pyload", "locale"), None]) + translation = gettext.translation("setup", join(self.path, "locale"), languages=[self.lang, 
"en"], fallback=True) + translation.install(True) + + #l10n Input shorthand for yes + self.yes = _("y") + #l10n Input shorthand for no + self.no = _("n") + + def ask(self, qst, default, answers=[], bool=False, password=False): + """ Generate dialog on command line """ + + if answers: + info = "(" + for i, answer in enumerate(answers): + info += (", " if i != 0 else "") + str((answer == default and "[%s]" % answer) or answer) + + info += ")" + elif bool: + if default == self.yes: + info = "([%s]/%s)" % (self.yes, self.no) + else: + info = "(%s/[%s])" % (self.yes, self.no) + else: + info = "[%s]" % default + + if password: + p1 = True + p2 = False + while p1 != p2: + # getpass(_("Password: ")) will crash on systems with broken locales (Win, NAS) + sys.stdout.write(_("Password: ")) + p1 = getpass("") + + if len(p1) < 4: + print _("Password too short. Use at least 4 symbols.") + continue + + sys.stdout.write(_("Password (again): ")) + p2 = getpass("") + + if p1 == p2: + return p1 + else: + print _("Passwords did not match.") + + while True: + input = raw_input(qst + " %s: " % info) + input = input.decode(self.stdin_encoding) + + if input.strip() == "": + input = default + + if bool: + #l10n yes, true,t are inputs for booleans with value true + if input.lower().strip() in [self.yes, _("yes"), _("true"), _("t"), "yes"]: + return True + #l10n no, false,f are inputs for booleans with value false + elif input.lower().strip() in [self.no, _("no"), _("false"), _("f"), "no"]: + return False + else: + print _("Invalid Input") + continue + + if not answers: + return input + + else: + if input in answers: + return input + else: + print _("Invalid Input") + + +if __name__ == "__main__": + test = Setup(join(abspath(dirname(__file__)), ".."), None) + test.start() diff --git a/pyload/__init__.py b/pyload/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pyload/api/AccountApi.py b/pyload/api/AccountApi.py new file mode 100644 index 000000000..999484974 --- /dev/null +++ b/pyload/api/AccountApi.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pyload.Api import Api, RequirePerm, Permission + +from ApiComponent import ApiComponent + + +class AccountApi(ApiComponent): + """ All methods to control accounts """ + + @RequirePerm(Permission.Accounts) + def getAccounts(self, refresh): + """Get information about all entered accounts. + + :param refresh: reload account info + :return: list of `AccountInfo` + """ + accs = self.core.accountManager.getAllAccounts(refresh) + accounts = [] + for plugin in accs.itervalues(): + accounts.extend([acc.toInfoData() for acc in plugin.values()]) + + return accounts + + @RequirePerm(Permission.All) + def getAccountTypes(self): + """All available account types. + + :return: string list + """ + return self.core.pluginManager.getPlugins("accounts").keys() + + @RequirePerm(Permission.Accounts) + def updateAccount(self, plugin, login, password): + """Changes pw/options for specific account.""" + # TODO: options + self.core.accountManager.updateAccount(plugin, login, password, {}) + + def updateAccountInfo(self, account): + """ Update account from :class:`AccountInfo` """ + #TODO + + @RequirePerm(Permission.Accounts) + def removeAccount(self, account): + """Remove account from pyload. 
+
+        :param account: :class:`AccountInfo` instance
+        """
+        self.core.accountManager.removeAccount(account.plugin, account.loginname)
+
+
+if Api.extend(AccountApi):
+    del AccountApi
\ No newline at end of file
diff --git a/pyload/api/AddonApi.py b/pyload/api/AddonApi.py
new file mode 100644
index 000000000..4ae686d2d
--- /dev/null
+++ b/pyload/api/AddonApi.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from pyload.Api import Api, RequirePerm, Permission
+
+from ApiComponent import ApiComponent
+
+class AddonApi(ApiComponent):
+    """ Methods to interact with addons """
+
+    def getAllInfo(self):
+        """Returns all information stored by addon plugins. Values are always strings.
+
+        :return: {"plugin": {"name": value}}
+        """
+        return self.core.addonManager.getAllInfo()
+
+    def getInfoByPlugin(self, plugin):
+        """Returns information stored by a specific plugin.
+
+        :param plugin: pluginname
+        :return: dict of attr names mapped to value {"name": value}
+        """
+        return self.core.addonManager.getInfo(plugin)
+
+if Api.extend(AddonApi):
+    del AddonApi
\ No newline at end of file
diff --git a/pyload/api/ApiComponent.py b/pyload/api/ApiComponent.py
new file mode 100644
index 000000000..bb333c259
--- /dev/null
+++ b/pyload/api/ApiComponent.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from pyload.remote.apitypes import Iface
+
+# Workaround to make code completion think this is a subclass of Iface
+Iface = object
+
+class ApiComponent(Iface):
+
+    __slots__ = []
+
+    def __init__(self, core, user):
+        # Only for auto completion, this class can not be instantiated
+        from pyload import Core
+        from pyload.datatypes.User import User
+        assert isinstance(core, Core)
+        assert issubclass(ApiComponent, Iface)
+        self.core = core
+        assert isinstance(user, User)
+        self.user = user
+        self.primaryUID = 0
+        # No instantiating!
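+        # The asserts above only help IDEs infer the types of self.core and
+        # self.user; at runtime a component is never constructed directly,
+        # its methods are merged into Api via Api.extend() instead.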
+ raise Exception() \ No newline at end of file diff --git a/pyload/api/CollectorApi.py b/pyload/api/CollectorApi.py new file mode 100644 index 000000000..49340285e --- /dev/null +++ b/pyload/api/CollectorApi.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pyload.Api import Api, RequirePerm, Permission + +from ApiComponent import ApiComponent + +class CollectorApi(ApiComponent): + """ Link collector """ + + @RequirePerm(Permission.All) + def getCollector(self): + pass + + @RequirePerm(Permission.Add) + def addToCollector(self, links): + pass + + @RequirePerm(Permission.Add) + def addFromCollector(self, name, new_name): + pass + + @RequirePerm(Permission.Delete) + def deleteCollPack(self, name): + pass + + @RequirePerm(Permission.Add) + def renameCollPack(self, name, new_name): + pass + + @RequirePerm(Permission.Delete) + def deleteCollLink(self, url): + pass + + +if Api.extend(CollectorApi): + del CollectorApi \ No newline at end of file diff --git a/pyload/api/ConfigApi.py b/pyload/api/ConfigApi.py new file mode 100644 index 000000000..82cfdd418 --- /dev/null +++ b/pyload/api/ConfigApi.py @@ -0,0 +1,134 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pyload.Api import Api, RequirePerm, Permission, ConfigHolder, ConfigItem, ConfigInfo +from pyload.utils import to_string + +from ApiComponent import ApiComponent + +# helper function to create a ConfigHolder +def toConfigHolder(section, config, values): + holder = ConfigHolder(section, config.name, config.description, config.long_desc) + holder.items = [ConfigItem(option, x.name, x.description, x.type, to_string(x.default), + to_string(values.get(option, x.default))) for option, x in + config.config.iteritems()] + return holder + + +class ConfigApi(ApiComponent): + """ Everything related to configuration """ + + def getConfigValue(self, section, option): + """Retrieve config value. + + :param section: name of category, or plugin + :param option: config option + :rtype: str + :return: config value as string + """ + value = self.core.config.get(section, option, self.primaryUID) + return to_string(value) + + def setConfigValue(self, section, option, value): + """Set new config value. + + :param section: + :param option: + :param value: new config value + """ + if option in ("limit_speed", "max_speed"): #not so nice to update the limit + self.core.requestFactory.updateBucket() + + self.core.config.set(section, option, value, self.primaryUID) + + def getConfig(self): + """Retrieves complete config of core. 
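+
+        A minimal read-only sketch of consuming the result (the ``api``
+        handle is an assumption)::
+
+            for section, holder in api.getConfig().iteritems():
+                print section, [item.name for item in holder.items]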
+ + :rtype: dict of section -> ConfigHolder + """ + data = {} + for section, config, values in self.core.config.iterCoreSections(): + data[section] = toConfigHolder(section, config, values) + return data + + def getCoreConfig(self): + """ Retrieves core config sections + + :rtype: list of PluginInfo + """ + return [ConfigInfo(section, config.name, config.description, False, False) + for section, config, values in self.core.config.iterCoreSections()] + + @RequirePerm(Permission.Plugins) + def getPluginConfig(self): + """All plugins and addons the current user has configured + + :rtype: list of PluginInfo + """ + # TODO: include addons that are activated by default + # TODO: multi user + # TODO: better plugin / addon activated config + data = [] + active = [x.getName() for x in self.core.addonManager.activePlugins()] + for name, config, values in self.core.config.iterSections(self.primaryUID): + # skip unmodified and inactive addons + if not values and name not in active: continue + + item = ConfigInfo(name, config.name, config.description, + self.core.pluginManager.getCategory(name), + self.core.pluginManager.isUserPlugin(name), + values.get("activated", None if "activated" not in config.config else config.config[ + "activated"].default)) + data.append(item) + + return data + + @RequirePerm(Permission.Plugins) + def getAvailablePlugins(self): + """List of all available plugins, that are configurable + + :rtype: list of PluginInfo + """ + # TODO: filter user_context / addons when not allowed + plugins = [ConfigInfo(name, config.name, config.description, + self.core.pluginManager.getCategory(name), + self.core.pluginManager.isUserPlugin(name)) + for name, config, values in self.core.config.iterSections(self.primaryUID)] + + return plugins + + @RequirePerm(Permission.Plugins) + def loadConfig(self, name): + """Get complete config options for desired section + + :param name: Name of plugin or config section + :rtype: ConfigHolder + """ + # requires at least plugin permissions, but only admin can load core config + config, values = self.core.config.getSection(name, self.primaryUID) + return toConfigHolder(name, config, values) + + + @RequirePerm(Permission.Plugins) + def saveConfig(self, config): + """Used to save a configuration, core config can only be saved by admins + + :param config: :class:`ConfigHolder` + """ + for item in config.items: + self.core.config.set(config.name, item.name, item.value, sync=False, user=self.primaryUID) + # save the changes + self.core.config.saveValues(self.primaryUID, config.name) + + @RequirePerm(Permission.Plugins) + def deleteConfig(self, plugin): + """Deletes modified config + + :param plugin: plugin name + """ + #TODO: delete should deactivate addons? 
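+        # Note: this only removes the stored values for the plugin;
+        # deactivating the addon itself (see TODO above) is not done here.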
+ self.core.config.delete(plugin, self.primaryUID) + + +if Api.extend(ConfigApi): + del ConfigApi \ No newline at end of file diff --git a/pyload/api/CoreApi.py b/pyload/api/CoreApi.py new file mode 100644 index 000000000..ebb194134 --- /dev/null +++ b/pyload/api/CoreApi.py @@ -0,0 +1,131 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pyload.Api import Api, RequirePerm, Permission, ServerStatus, Interaction +from pyload.utils.fs import join, free_space +from pyload.utils import compare_time + +from ApiComponent import ApiComponent + +class CoreApi(ApiComponent): + """ This module provides methods for general interaction with the core, like status or progress retrieval """ + + @RequirePerm(Permission.All) + def getServerVersion(self): + """pyLoad Core version """ + return self.core.version + + @RequirePerm(Permission.All) + def getWSAddress(self): + """Gets and address for the websocket based on configuration""" + # TODO SSL (wss) + return "ws://%%s:%d" % self.core.config['remote']['port'] + + @RequirePerm(Permission.All) + def getServerStatus(self): + """Some general information about the current status of pyLoad. + + :return: `ServerStatus` + """ + queue = self.core.files.getQueueStats(self.primaryUID) + total = self.core.files.getDownloadStats(self.primaryUID) + + serverStatus = ServerStatus(0, + total[0], queue[0], + total[1], queue[1], + self.isInteractionWaiting(Interaction.All), + not self.core.threadManager.pause and self.isTimeDownload(), + self.core.threadManager.pause, + self.core.config['reconnect']['activated'] and self.isTimeReconnect()) + + + for pyfile in self.core.threadManager.getActiveDownloads(self.primaryUID): + serverStatus.speed += pyfile.getSpeed() #bytes/s + + return serverStatus + + @RequirePerm(Permission.All) + def getProgressInfo(self): + """ Status of all currently running tasks + + :rtype: list of :class:`ProgressInfo` + """ + return self.core.threadManager.getProgressList(self.primaryUID) + + def pauseServer(self): + """Pause server: It won't start any new downloads, but nothing gets aborted.""" + self.core.threadManager.pause = True + + def unpauseServer(self): + """Unpause server: New Downloads will be started.""" + self.core.threadManager.pause = False + + def togglePause(self): + """Toggle pause state. + + :return: new pause state + """ + self.core.threadManager.pause ^= True + return self.core.threadManager.pause + + def toggleReconnect(self): + """Toggle reconnect activation. + + :return: new reconnect state + """ + self.core.config["reconnect"]["activated"] ^= True + return self.core.config["reconnect"]["activated"] + + def freeSpace(self): + """Available free space at download directory in bytes""" + return free_space(self.core.config["general"]["download_folder"]) + + + def quit(self): + """Clean way to quit pyLoad""" + self.core.do_kill = True + + def restart(self): + """Restart pyload core""" + self.core.do_restart = True + + def getLog(self, offset=0): + """Returns most recent log entries. + + :param offset: line offset + :return: List of log entries + """ + filename = join(self.core.config['log']['log_folder'], 'log.txt') + try: + fh = open(filename, "r") + lines = fh.readlines() + fh.close() + if offset >= len(lines): + return [] + return lines[offset:] + except: + return ['No log available'] + + @RequirePerm(Permission.All) + def isTimeDownload(self): + """Checks if pyload will start new downloads according to time in config. 
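+
+        For example, with ``downloadTime.start = "8:00"`` and
+        ``downloadTime.end = "20:00"`` (illustrative values) this returns
+        True only inside that window.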
+ + :return: bool + """ + start = self.core.config['downloadTime']['start'].split(":") + end = self.core.config['downloadTime']['end'].split(":") + return compare_time(start, end) + + @RequirePerm(Permission.All) + def isTimeReconnect(self): + """Checks if pyload will try to make a reconnect + + :return: bool + """ + start = self.core.config['reconnect']['startTime'].split(":") + end = self.core.config['reconnect']['endTime'].split(":") + return compare_time(start, end) and self.core.config["reconnect"]["activated"] + + +if Api.extend(CoreApi): + del CoreApi \ No newline at end of file diff --git a/pyload/api/DownloadApi.py b/pyload/api/DownloadApi.py new file mode 100644 index 000000000..0a01007b5 --- /dev/null +++ b/pyload/api/DownloadApi.py @@ -0,0 +1,182 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from os.path import isabs + +from pyload.Api import Api, RequirePerm, Permission +from pyload.utils.fs import join + +from ApiComponent import ApiComponent + +class DownloadApi(ApiComponent): + """ Component to create, add, delete or modify downloads.""" + + @RequirePerm(Permission.Add) + def generateAndAddPackages(self, links, paused=False): + """Generates and add packages + + :param links: list of urls + :param paused: paused package + :return: list of package ids + """ + return [self.addPackageP(name, urls, "", paused) for name, urls + in self.generatePackages(links).iteritems()] + + @RequirePerm(Permission.Add) + def createPackage(self, name, folder, root, password="", site="", comment="", paused=False): + """Create a new package. + + :param name: display name of the package + :param folder: folder name or relative path, abs path are not allowed + :param root: package id of root package, -1 for top level package + :param password: single pw or list of passwords separated with new line + :param site: arbitrary url to site for more information + :param comment: arbitrary comment + :param paused: No downloads will be started when True + :return: pid of newly created package + """ + + if isabs(folder): + folder = folder.replace("/", "_") + + folder = folder.replace("http://", "").replace(":", "").replace("\\", "_").replace("..", "") + + self.core.log.info(_("Added package %(name)s as folder %(folder)s") % {"name": name, "folder": folder}) + pid = self.core.files.addPackage(name, folder, root, password, site, comment, paused) + + return pid + + + @RequirePerm(Permission.Add) + def addPackage(self, name, links, password=""): + """Convenient method to add a package to the top-level and for adding links. + + :return: package id + """ + return self.addPackageChild(name, links, password, -1, False) + + @RequirePerm(Permission.Add) + def addPackageP(self, name, links, password, paused): + """ Same as above with additional paused attribute. """ + return self.addPackageChild(name, links, password, -1, paused) + + @RequirePerm(Permission.Add) + def addPackageChild(self, name, links, password, root, paused): + """Adds a package, with links to desired package. + + :param root: parents package id + :return: package id of the new package + """ + if self.core.config['general']['folder_per_package']: + folder = name + else: + folder = "" + + pid = self.createPackage(name, folder, root, password) + self.addLinks(pid, links) + + return pid + + @RequirePerm(Permission.Add) + def addLinks(self, pid, links): + """Adds links to specific package. Initiates online status fetching. 
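+
+        Hypothetical call, package id assumed to exist::
+
+            api.addLinks(1, ["http://example.com/file.zip"])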
+
+        :param pid: package id
+        :param links: list of urls
+        """
+        hoster, crypter = self.core.pluginManager.parseUrls(links)
+
+        if hoster:
+            self.core.files.addLinks(hoster, pid)
+            self.core.threadManager.createInfoThread(hoster, pid)
+
+        self.core.threadManager.createDecryptThread(crypter, pid)
+
+        self.core.log.info((_("Added %d links to package") + " #%d" % pid) % len(hoster))
+        self.core.files.save()
+
+    @RequirePerm(Permission.Add)
+    def uploadContainer(self, filename, data):
+        """Uploads and adds a container file to pyLoad.
+
+        :param filename: filename, the extension is important so the file can be decrypted correctly
+        :param data: file content
+        """
+        th = open(join(self.core.config["general"]["download_folder"], "tmp_" + filename), "wb")
+        th.write(str(data))
+        th.close()
+
+        return self.addPackage(th.name, [th.name])
+
+    @RequirePerm(Permission.Delete)
+    def deleteFiles(self, fids):
+        """Deletes several file entries from pyload.
+
+        :param fids: list of file ids
+        """
+        for fid in fids:
+            self.core.files.deleteFile(fid)
+
+        self.core.files.save()
+
+    @RequirePerm(Permission.Delete)
+    def deletePackages(self, pids):
+        """Deletes packages and the links they contain.
+
+        :param pids: list of package ids
+        """
+        for pid in pids:
+            self.core.files.deletePackage(pid)
+
+        self.core.files.save()
+
+
+    @RequirePerm(Permission.Modify)
+    def restartPackage(self, pid):
+        """Restarts a package, resets all files it contains.
+
+        :param pid: package id
+        """
+        self.core.files.restartPackage(pid)
+
+    @RequirePerm(Permission.Modify)
+    def restartFile(self, fid):
+        """Resets file status, so it will be downloaded again.
+
+        :param fid: file id
+        """
+        self.core.files.restartFile(fid)
+
+    @RequirePerm(Permission.Modify)
+    def recheckPackage(self, pid):
+        """Checks the online status of all files in a package, also the default action when a package is added. """
+        self.core.files.reCheckPackage(pid)
+
+    @RequirePerm(Permission.Modify)
+    def restartFailed(self):
+        """Restarts all failed files."""
+        self.core.files.restartFailed()
+
+    @RequirePerm(Permission.Modify)
+    def stopAllDownloads(self):
+        """Aborts all running downloads."""
+
+        pyfiles = self.core.files.cachedFiles()
+        for pyfile in pyfiles:
+            pyfile.abortDownload()
+
+    @RequirePerm(Permission.Modify)
+    def stopDownloads(self, fids):
+        """Aborts specific downloads. 
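+
+        Hypothetical call, aborting two running downloads by file id::
+
+            api.stopDownloads([3, 7])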
+ + :param fids: list of file ids + :return: + """ + pyfiles = self.core.files.cachedFiles() + for pyfile in pyfiles: + if pyfile.id in fids: + pyfile.abortDownload() + + +if Api.extend(DownloadApi): + del DownloadApi \ No newline at end of file diff --git a/pyload/api/DownloadPreparingApi.py b/pyload/api/DownloadPreparingApi.py new file mode 100644 index 000000000..0a47fe5ab --- /dev/null +++ b/pyload/api/DownloadPreparingApi.py @@ -0,0 +1,121 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from itertools import chain + +from pyload.Api import Api, RequirePerm, Permission, OnlineCheck, LinkStatus, urlmatcher +from pyload.utils.fs import join +from pyload.utils.packagetools import parseNames +from pyload.network.RequestFactory import getURL + +from ApiComponent import ApiComponent + +class DownloadPreparingApi(ApiComponent): + """ All kind of methods to parse links or retrieve online status """ + + @RequirePerm(Permission.Add) + def parseURLs(self, html=None, url=None): + """Parses html content or any arbitrary text for links and returns result of `checkURLs` + + :param html: html source + :return: + """ + urls = [] + + if html: + urls += [x[0] for x in urlmatcher.findall(html)] + + if url: + page = getURL(url) + urls += [x[0] for x in urlmatcher.findall(page)] + + # remove duplicates + return self.checkURLs(set(urls)) + + + @RequirePerm(Permission.Add) + def checkURLs(self, urls): + """ Gets urls and returns pluginname mapped to list of matching urls. + + :param urls: + :return: {plugin: urls} + """ + data, crypter = self.core.pluginManager.parseUrls(urls) + plugins = {} + + for url, plugin in chain(data, crypter): + if plugin in plugins: + plugins[plugin].append(url) + else: + plugins[plugin] = [url] + + return plugins + + @RequirePerm(Permission.Add) + def checkOnlineStatus(self, urls): + """ initiates online status check, will also decrypt files. 
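+
+        Typical polling pattern (the url is illustrative)::
+
+            check = api.checkOnlineStatus(["http://example.com/file.zip"])
+            result = api.pollResults(check.rid)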
+ + :param urls: + :return: initial set of data as :class:`OnlineCheck` instance containing the result id + """ + data, crypter = self.core.pluginManager.parseUrls(urls) + + # initial result does not contain the crypter links + tmp = [(url, (url, LinkStatus(url, pluginname, "unknown", 3, 0))) for url, pluginname in data] + data = parseNames(tmp) + result = {} + + for k, v in data.iteritems(): + for url, status in v: + status.packagename = k + result[url] = status + + data.update(crypter) # hoster and crypter will be processed + rid = self.core.threadManager.createResultThread(data, False) + + return OnlineCheck(rid, result) + + @RequirePerm(Permission.Add) + def checkOnlineStatusContainer(self, urls, container, data): + """ checks online status of urls and a submitted container file + + :param urls: list of urls + :param container: container file name + :param data: file content + :return: :class:`OnlineCheck` + """ + th = open(join(self.core.config["general"]["download_folder"], "tmp_" + container), "wb") + th.write(str(data)) + th.close() + urls.append(th.name) + return self.checkOnlineStatus(urls) + + @RequirePerm(Permission.Add) + def pollResults(self, rid): + """ Polls the result available for ResultID + + :param rid: `ResultID` + :return: `OnlineCheck`, if rid is -1 then there is no more data available + """ + result = self.core.threadManager.getInfoResult(rid) + + if "ALL_INFO_FETCHED" in result: + del result["ALL_INFO_FETCHED"] + return OnlineCheck(-1, result) + else: + return OnlineCheck(rid, result) + + + @RequirePerm(Permission.Add) + def generatePackages(self, links): + """ Parses links, generates packages names from urls + + :param links: list of urls + :return: package names mapped to urls + """ + result = parseNames((x, x) for x in links) + return result + + +if Api.extend(DownloadPreparingApi): + del DownloadPreparingApi \ No newline at end of file diff --git a/pyload/api/FileApi.py b/pyload/api/FileApi.py new file mode 100644 index 000000000..2ca409165 --- /dev/null +++ b/pyload/api/FileApi.py @@ -0,0 +1,169 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pyload.Api import Api, RequirePerm, Permission, DownloadState, PackageDoesNotExists, FileDoesNotExists +from pyload.utils import uniqify + +from ApiComponent import ApiComponent + +# TODO: user context +class FileApi(ApiComponent): + """Everything related to available packages or files. Deleting, Modifying and so on.""" + + @RequirePerm(Permission.All) + def getAllFiles(self): + """ same as `getFileTree` for toplevel root and full tree""" + return self.getFileTree(-1, True) + + @RequirePerm(Permission.All) + def getFilteredFiles(self, state): + """ same as `getFilteredFileTree` for toplevel root and full tree""" + return self.getFilteredFileTree(-1, state, True) + + @RequirePerm(Permission.All) + def getFileTree(self, pid, full): + """ Retrieve data for specific package. full=True will retrieve all data available + and can result in greater delays. + + :param pid: package id + :param full: go down the complete tree or only the first layer + :return: :class:`TreeCollection` + """ + return self.core.files.getTree(pid, full, DownloadState.All) + + @RequirePerm(Permission.All) + def getFilteredFileTree(self, pid, full, state): + """ Same as `getFileTree` but only contains files with specific download state. 
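+
+        For instance, to fetch only finished files over the whole tree
+        (assuming the enum offers ``DownloadState.Finished``)::
+
+            tree = api.getFilteredFileTree(-1, True, DownloadState.Finished)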
+ + :param pid: package id + :param full: go down the complete tree or only the first layer + :param state: :class:`DownloadState`, the attributes used for filtering + :return: :class:`TreeCollection` + """ + return self.core.files.getTree(pid, full, state) + + @RequirePerm(Permission.All) + def getPackageContent(self, pid): + """ Only retrieve content of a specific package. see `getFileTree`""" + return self.getFileTree(pid, False) + + @RequirePerm(Permission.All) + def getPackageInfo(self, pid): + """Returns information about package, without detailed information about containing files + + :param pid: package id + :raises PackageDoesNotExists: + :return: :class:`PackageInfo` + """ + info = self.core.files.getPackageInfo(pid) + if not info: + raise PackageDoesNotExists(pid) + return info + + @RequirePerm(Permission.All) + def getFileInfo(self, fid): + """ Info for specific file + + :param fid: file id + :raises FileDoesNotExists: + :return: :class:`FileInfo` + + """ + info = self.core.files.getFileInfo(fid) + if not info: + raise FileDoesNotExists(fid) + return info + + @RequirePerm(Permission.Download) + def getFilePath(self, fid): + """ Internal method to get the filepath""" + info = self.getFileInfo(fid) + pack = self.core.files.getPackage(info.package) + return pack.getPath(), info.name + + + @RequirePerm(Permission.All) + def findFiles(self, pattern): + return self.core.files.getTree(-1, True, DownloadState.All, pattern) + + @RequirePerm(Permission.All) + def searchSuggestions(self, pattern): + names = self.core.db.getMatchingFilenames(pattern, self.primaryUID) + # TODO: stemming and reducing the names to provide better suggestions + return uniqify(names) + + @RequirePerm(Permission.All) + def findPackages(self, tags): + pass + + @RequirePerm(Permission.Modify) + def updatePackage(self, pack): + """Allows to modify several package attributes. + + :param pid: package id + :param data: :class:`PackageInfo` + """ + pid = pack.pid + p = self.core.files.getPackage(pid) + if not p: raise PackageDoesNotExists(pid) + + #TODO: fix + for key, value in data.iteritems(): + if key == "id": continue + setattr(p, key, value) + + p.sync() + self.core.files.save() + + @RequirePerm(Permission.Modify) + def setPackageFolder(self, pid, path): + pass + + @RequirePerm(Permission.Modify) + def movePackage(self, pid, root): + """ Set a new root for specific package. This will also moves the files on disk\ + and will only work when no file is currently downloading. + + :param pid: package id + :param root: package id of new root + :raises PackageDoesNotExists: When pid or root is missing + :return: False if package can't be moved + """ + return self.core.files.movePackage(pid, root) + + @RequirePerm(Permission.Modify) + def moveFiles(self, fids, pid): + """Move multiple files to another package. This will move the files on disk and\ + only work when files are not downloading. All files needs to be continuous ordered + in the current package. + + :param fids: list of file ids + :param pid: destination package + :return: False if files can't be moved + """ + return self.core.files.moveFiles(fids, pid) + + @RequirePerm(Permission.Modify) + def orderPackage(self, pid, position): + """Set new position for a package. + + :param pid: package id + :param position: new position, 0 for very beginning + """ + self.core.files.orderPackage(pid, position) + + @RequirePerm(Permission.Modify) + def orderFiles(self, fids, pid, position): + """ Set a new position for a bunch of files within a package. 
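+        (For example, ``api.orderFiles([4, 5, 6], 2, 0)`` would move files
+        4-6 to the top of package 2; ids are illustrative.)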
+ All files have to be in the same package and must be **continuous**\ + in the package. That means no gaps between them. + + :param fids: list of file ids + :param pid: package id of parent package + :param position: new position: 0 for very beginning + """ + self.core.files.orderFiles(fids, pid, position) + + +if Api.extend(FileApi): + del FileApi \ No newline at end of file diff --git a/pyload/api/UserInteractionApi.py b/pyload/api/UserInteractionApi.py new file mode 100644 index 000000000..f5a9e9290 --- /dev/null +++ b/pyload/api/UserInteractionApi.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pyload.Api import Api, RequirePerm, Permission, Interaction + +from ApiComponent import ApiComponent + +class UserInteractionApi(ApiComponent): + """ Everything needed for user interaction """ + + @RequirePerm(Permission.Interaction) + def isInteractionWaiting(self, mode): + """ Check if task is waiting. + + :param mode: binary or'ed output type + :return: boolean + """ + return self.core.interactionManager.isTaskWaiting(self.primaryUID, mode) + + @RequirePerm(Permission.Interaction) + def getInteractionTasks(self, mode): + """Retrieve task for specific mode. + + :param mode: binary or'ed interaction types which should be retrieved + :rtype list of :class:`InteractionTask` + """ + tasks = self.core.interactionManager.getTasks(self.primaryUID, mode) + # retrieved tasks count as seen + for t in tasks: + t.seen = True + if t.type == Interaction.Notification: + t.setWaiting(self.core.interactionManager.CLIENT_THRESHOLD) + + return tasks + + @RequirePerm(Permission.Interaction) + def setInteractionResult(self, iid, result): + """Set Result for a interaction task. It will be immediately removed from task queue afterwards + + :param iid: interaction id + :param result: result as json string + """ + task = self.core.interactionManager.getTaskByID(iid) + if task and self.primaryUID == task.owner: + task.setResult(result) + + @RequirePerm(Permission.Interaction) + def getAddonHandler(self): + pass + + @RequirePerm(Permission.Interaction) + def callAddonHandler(self, plugin, func, pid_or_fid): + pass + + @RequirePerm(Permission.Download) + def generateDownloadLink(self, fid, timeout): + pass + + +if Api.extend(UserInteractionApi): + del UserInteractionApi \ No newline at end of file diff --git a/pyload/api/__init__.py b/pyload/api/__init__.py new file mode 100644 index 000000000..1348fd26f --- /dev/null +++ b/pyload/api/__init__.py @@ -0,0 +1,8 @@ +__all__ = ["CoreApi", "ConfigApi", "DownloadApi", "DownloadPreparingApi", "FileApi", + "CollectorApi", "UserInteractionApi", "AccountApi", "AddonApi"] + +# Import all components +# from .import * +# Above does not work in py 2.5 +for name in __all__: + __import__(__name__ + "." + name) \ No newline at end of file diff --git a/pyload/cli/AddPackage.py b/pyload/cli/AddPackage.py new file mode 100644 index 000000000..a73401586 --- /dev/null +++ b/pyload/cli/AddPackage.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +#Copyright (C) 2011 RaNaN +# +#This program is free software; you can redistribute it and/or modify +#it under the terms of the GNU General Public License as published by +#the Free Software Foundation; either version 3 of the License, +#or (at your option) any later version. +# +#This program is distributed in the hope that it will be useful, +#but WITHOUT ANY WARRANTY; without even the implied warranty of +#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
+#See the GNU General Public License for more details. +# +#You should have received a copy of the GNU General Public License +# along with this program; if not, see . +# +### + +from Handler import Handler +from printer import * + +class AddPackage(Handler): + """ let the user add packages """ + + def init(self): + self.name = "" + self.urls = [] + + def onEnter(self, inp): + if inp == "0": + self.cli.reset() + + if not self.name: + self.name = inp + self.setInput() + elif inp == "END": + #add package + self.client.addPackage(self.name, self.urls, 1) + self.cli.reset() + else: + if inp.strip(): + self.urls.append(inp) + self.setInput() + + def renderBody(self, line): + println(line, white(_("Add Package:"))) + println(line + 1, "") + line += 2 + + if not self.name: + println(line, _("Enter a name for the new package")) + println(line + 1, "") + line += 2 + else: + println(line, _("Package: %s") % self.name) + println(line + 1, _("Parse the links you want to add.")) + println(line + 2, _("Type %s when done.") % mag("END")) + println(line + 3, _("Links added: ") + mag(len(self.urls))) + line += 4 + + println(line, "") + println(line + 1, mag("0.") + _(" back to main menu")) + + return line + 2 \ No newline at end of file diff --git a/pyload/cli/Handler.py b/pyload/cli/Handler.py new file mode 100644 index 000000000..476d09386 --- /dev/null +++ b/pyload/cli/Handler.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +#Copyright (C) 2011 RaNaN +# +#This program is free software; you can redistribute it and/or modify +#it under the terms of the GNU General Public License as published by +#the Free Software Foundation; either version 3 of the License, +#or (at your option) any later version. +# +#This program is distributed in the hope that it will be useful, +#but WITHOUT ANY WARRANTY; without even the implied warranty of +#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +#See the GNU General Public License for more details. +# +#You should have received a copy of the GNU General Public License +# along with this program; if not, see . +# +### +class Handler: + def __init__(self, cli): + self.cli = cli + self.init() + + client = property(lambda self: self.cli.client) + input = property(lambda self: self.cli.input) + + def init(self): + pass + + def onChar(self, char): + pass + + def onBackSpace(self): + pass + + def onEnter(self, inp): + pass + + def setInput(self, inp=""): + self.cli.setInput(inp) + + def backspace(self): + self.cli.setInput(self.input[:-1]) + + def renderBody(self, line): + """ gets the line where to render output and should return the line number below its content """ + return line + 1 \ No newline at end of file diff --git a/pyload/cli/ManageFiles.py b/pyload/cli/ManageFiles.py new file mode 100644 index 000000000..2304af355 --- /dev/null +++ b/pyload/cli/ManageFiles.py @@ -0,0 +1,204 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +#Copyright (C) 2011 RaNaN +# +#This program is free software; you can redistribute it and/or modify +#it under the terms of the GNU General Public License as published by +#the Free Software Foundation; either version 3 of the License, +#or (at your option) any later version. +# +#This program is distributed in the hope that it will be useful, +#but WITHOUT ANY WARRANTY; without even the implied warranty of +#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +#See the GNU General Public License for more details. 
+# +#You should have received a copy of the GNU General Public License +# along with this program; if not, see . +# +### + +from itertools import islice +from time import time + +from Handler import Handler +from printer import * + +from pyload.Api import Destination, PackageData + +class ManageFiles(Handler): + """ possibility to manage queue/collector """ + + def init(self): + self.target = Destination.Queue + self.pos = 0 #position in queue + self.package = -1 #chosen package + self.mode = "" # move/delete/restart + + self.cache = None + self.links = None + self.time = 0 + + def onChar(self, char): + if char in ("m", "d", "r"): + self.mode = char + self.setInput() + elif char == "p": + self.pos = max(0, self.pos - 5) + self.backspace() + elif char == "n": + self.pos += 5 + self.backspace() + + def onBackSpace(self): + if not self.input and self.mode: + self.mode = "" + if not self.input and self.package > -1: + self.package = -1 + + def onEnter(self, input): + if input == "0": + self.cli.reset() + elif self.package < 0 and self.mode: + #mode select + packs = self.parseInput(input) + if self.mode == "m": + [self.client.movePackage((self.target + 1) % 2, x) for x in packs] + elif self.mode == "d": + self.client.deletePackages(packs) + elif self.mode == "r": + [self.client.restartPackage(x) for x in packs] + + elif self.mode: + #edit links + links = self.parseInput(input, False) + + if self.mode == "d": + self.client.deleteFiles(links) + elif self.mode == "r": + map(self.client.restartFile, links) + + else: + #look into package + try: + self.package = int(input) + except: + pass + + self.cache = None + self.links = None + self.pos = 0 + self.mode = "" + self.setInput() + + + def renderBody(self, line): + if self.package < 0: + println(line, white(_("Manage Packages:"))) + else: + println(line, white((_("Manage Links:")))) + line += 1 + + if self.mode: + if self.mode == "m": + println(line, _("What do you want to move?")) + elif self.mode == "d": + println(line, _("What do you want to delete?")) + elif self.mode == "r": + println(line, _("What do you want to restart?")) + + println(line + 1, "Enter a single number, comma separated numbers or ranges. 
e.g.: 1,2,3 or 1-3.") + line += 2 + else: + println(line, _("Choose what you want to do, or enter package number.")) + println(line + 1, ("%s - %%s, %s - %%s, %s - %%s" % (mag("d"), mag("m"), mag("r"))) % ( + _("delete"), _("move"), _("restart"))) + line += 2 + + if self.package < 0: + #print package info + pack = self.getPackages() + i = 0 + for value in islice(pack, self.pos, self.pos + 5): + try: + println(line, mag(str(value.pid)) + ": " + value.name) + line += 1 + i += 1 + except Exception, e: + pass + for x in range(5 - i): + println(line, "") + line += 1 + else: + #print links info + pack = self.getLinks() + i = 0 + for value in islice(pack.links, self.pos, self.pos + 5): + try: + println(line, mag(value.fid) + ": %s | %s | %s" % ( + value.name, value.statusmsg, value.plugin)) + line += 1 + i += 1 + except Exception, e: + pass + for x in range(5 - i): + println(line, "") + line += 1 + + println(line, mag("p") + _(" - previous") + " | " + mag("n") + _(" - next")) + println(line + 1, mag("0.") + _(" back to main menu")) + + return line + 2 + + + def getPackages(self): + if self.cache and self.time + 2 < time(): + return self.cache + + if self.target == Destination.Queue: + data = self.client.getQueue() + else: + data = self.client.getCollector() + + + self.cache = data + self.time = time() + + return data + + def getLinks(self): + if self.links and self.time + 1 < time(): + return self.links + + try: + data = self.client.getPackageData(self.package) + except: + data = PackageData(links=[]) + + self.links = data + self.time = time() + + return data + + def parseInput(self, inp, package=True): + inp = inp.strip() + if "-" in inp: + l, n, h = inp.partition("-") + l = int(l) + h = int(h) + r = range(l, h + 1) + + ret = [] + if package: + for p in self.cache: + if p.pid in r: + ret.append(p.pid) + else: + for l in self.links.links: + if l.lid in r: + ret.append(l.lid) + + return ret + + else: + return [int(x) for x in inp.split(",")] diff --git a/pyload/cli/__init__.py b/pyload/cli/__init__.py new file mode 100644 index 000000000..fa8a09291 --- /dev/null +++ b/pyload/cli/__init__.py @@ -0,0 +1,2 @@ +from AddPackage import AddPackage +from ManageFiles import ManageFiles \ No newline at end of file diff --git a/pyload/cli/printer.py b/pyload/cli/printer.py new file mode 100644 index 000000000..c62c1800e --- /dev/null +++ b/pyload/cli/printer.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +def blue(string): + return "\033[1;34m" + unicode(string) + "\033[0m" + +def green(string): + return "\033[1;32m" + unicode(string) + "\033[0m" + +def yellow(string): + return "\033[1;33m" + unicode(string) + "\033[0m" + +def red(string): + return "\033[1;31m" + unicode(string) + "\033[0m" + +def cyan(string): + return "\033[1;36m" + unicode(string) + "\033[0m" + +def mag(string): + return "\033[1;35m" + unicode(string) + "\033[0m" + +def white(string): + return "\033[1;37m" + unicode(string) + "\033[0m" + +def println(line, content): + print "\033[" + str(line) + ";0H\033[2K" + content \ No newline at end of file diff --git a/pyload/config/ConfigManager.py b/pyload/config/ConfigManager.py new file mode 100644 index 000000000..f9dc3795b --- /dev/null +++ b/pyload/config/ConfigManager.py @@ -0,0 +1,139 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from new_collections import OrderedDict + +from pyload.Api import InvalidConfigSection +from pyload.utils import from_string, json + +from ConfigParser import ConfigParser + + +def convertKeyError(func): + """ converts KeyError into 
InvalidConfigSection """ + + def conv(*args, **kwargs): + try: + return func(*args, **kwargs) + except KeyError: + raise InvalidConfigSection(args[1]) + + return conv + + +class ConfigManager(ConfigParser): + """ Manages the core config and configs for addons and single user. + Has similar interface to ConfigParser. """ + + def __init__(self, core, parser): + # No __init__ call to super class is needed! + + self.core = core + self.db = core.db + # The config parser, holding the core config + self.parser = parser + + # similar to parser, separated meta data and values + self.config = OrderedDict() + + # Value cache for multiple user configs + # Values are populated from db on first access + # Entries are saved as (user, section) keys + self.values = {} + # TODO: similar to a cache, could be deleted periodically + + def save(self): + self.parser.save() + + @convertKeyError + def get(self, section, option, user=None): + """get config value, core config only available for admins. + if user is not valid default value will be returned""" + + # Core config loaded from parser, when no user is given or he is admin + if section in self.parser and user is None: + return self.parser.get(section, option) + else: + # We need the id and not the instance + # Will be None for admin user and so the same as internal access + try: + # Check if this config exists + # Configs without meta data can not be loaded! + data = self.config[section].config[option] + return self.loadValues(user, section)[option] + except KeyError: + pass # Returns default value later + + return self.config[section].config[option].default + + def loadValues(self, user, section): + if (user, section) not in self.values: + conf = self.db.loadConfig(section, user) + try: + self.values[user, section] = json.loads(conf) if conf else {} + except ValueError: # Something did go wrong when parsing + self.values[user, section] = {} + self.core.print_exc() + + return self.values[user, section] + + @convertKeyError + def set(self, section, option, value, sync=True, user=None): + """ set config value """ + + changed = False + if section in self.parser and user is None: + changed = self.parser.set(section, option, value, sync) + else: + data = self.config[section].config[option] + value = from_string(value, data.type) + old_value = self.get(section, option) + + # Values will always be saved to db, sync is ignored + if value != old_value: + changed = True + self.values[user, section][option] = value + if sync: self.saveValues(user, section) + + if changed: self.core.evm.dispatchEvent("config:changed", section, option, value) + return changed + + def saveValues(self, user, section): + if section in self.parser and user is None: + self.save() + elif (user, section) in self.values: + self.db.saveConfig(section, json.dumps(self.values[user, section]), user) + + def delete(self, section, user=None): + """ Deletes values saved in db and cached values for given user, NOT meta data + Does not trigger an error when nothing was deleted. 
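+
+        Hypothetical call, removing a user's stored values for one plugin::
+
+            manager.delete("ExamplePlugin", user=uid)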
""" + if (user, section) in self.values: + del self.values[user, section] + + self.db.deleteConfig(section, user) + self.core.evm.dispatchEvent("config:deleted", section, user) + + def iterCoreSections(self): + return self.parser.iterSections() + + def iterSections(self, user=None): + """ Yields: section, metadata, values """ + values = self.db.loadConfigsForUser(user) + + # Every section needs to be json decoded + for section, data in values.items(): + try: + values[section] = json.loads(data) if data else {} + except ValueError: + values[section] = {} + self.core.print_exc() + + for name, config in self.config.iteritems(): + yield name, config, values[name] if name in values else {} + + def getSection(self, section, user=None): + if section in self.parser and user is None: + return self.parser.getSection(section) + + values = self.loadValues(user, section) + return self.config.get(section), values diff --git a/pyload/config/ConfigParser.py b/pyload/config/ConfigParser.py new file mode 100644 index 000000000..fd22e93b9 --- /dev/null +++ b/pyload/config/ConfigParser.py @@ -0,0 +1,207 @@ +# -*- coding: utf-8 -*- + +from __future__ import with_statement +from time import sleep +from os.path import exists +from gettext import gettext +from new_collections import namedtuple, OrderedDict + +from pyload.utils import from_string +from pyload.utils.fs import chmod + +from default import make_config + +CONF_VERSION = 2 +SectionTuple = namedtuple("SectionTuple", "name description long_desc config") +ConfigData = namedtuple("ConfigData", "name type description default") + +class ConfigParser: + """ + Holds and manages the configuration + meta data for config read from file. + """ + + CONFIG = "pyload.conf" + + def __init__(self, config=None): + + if config: self.CONFIG = config + + # Meta data information + self.config = OrderedDict() + # The actual config values + self.values = {} + + self.checkVersion() + + self.loadDefault() + self.parseValues(self.CONFIG) + + def loadDefault(self): + make_config(self) + + def checkVersion(self): + """Determines if config needs to be deleted""" + e = None + # workaround conflict, with GUI (which also accesses the config) so try read in 3 times + for i in range(0, 3): + try: + if exists(self.CONFIG): + f = open(self.CONFIG, "rb") + v = f.readline() + f.close() + v = v[v.find(":") + 1:].strip() + + if not v or int(v) < CONF_VERSION: + f = open(self.CONFIG, "wb") + f.write("version: " + str(CONF_VERSION)) + f.close() + print "Old version of %s deleted" % self.CONFIG + else: + f = open(self.CONFIG, "wb") + f.write("version:" + str(CONF_VERSION)) + f.close() + + except Exception, ex: + e = ex + sleep(0.3) + if e: raise e + + + def parseValues(self, filename): + """read config values from file""" + f = open(filename, "rb") + config = f.readlines()[1:] + + # save the current section + section = "" + + for line in config: + line = line.strip() + + # comment line, different variants + if not line or line.startswith("#") or line.startswith("//") or line.startswith(";"): continue + + if line.startswith("["): + section = line.replace("[", "").replace("]", "") + + if section not in self.config: + print "Unrecognized section", section + section = "" + + else: + name, non, value = line.rpartition("=") + name = name.strip() + value = value.strip() + + if not section: + print "Value without section", name + continue + + if name in self.config[section].config: + self.set(section, name, value, sync=False) + else: + print "Unrecognized option", section, name + + + def save(self): + 
"""saves config to filename""" + + configs = [] + f = open(self.CONFIG, "wb") + configs.append(f) + chmod(self.CONFIG, 0600) + f.write("version: %i\n\n" % CONF_VERSION) + + for section, data in self.config.iteritems(): + f.write("[%s]\n" % section) + + for option, data in data.config.iteritems(): + value = self.get(section, option) + if type(value) == unicode: value = value.encode("utf8") + else: value = str(value) + + f.write('%s = %s\n' % (option, value)) + + f.write("\n") + + f.close() + + def __getitem__(self, section): + """provides dictionary like access: c['section']['option']""" + return Section(self, section) + + def __contains__(self, section): + """ checks if parser contains section """ + return section in self.config + + def get(self, section, option): + """get value or default""" + try: + return self.values[section][option] + except KeyError: + return self.config[section].config[option].default + + def set(self, section, option, value, sync=True): + """set value""" + + data = self.config[section].config[option] + value = from_string(value, data.type) + old_value = self.get(section, option) + + # only save when different values + if value != old_value: + if section not in self.values: self.values[section] = {} + self.values[section][option] = value + if sync: + self.save() + return True + + return False + + def getMetaData(self, section, option): + """ get all config data for an option """ + return self.config[section].config[option] + + def iterSections(self): + """ Yields section, config info, values, for all sections """ + + for name, config in self.config.iteritems(): + yield name, config, self.values[name] if name in self.values else {} + + def getSection(self, section): + """ Retrieves single config as tuple (section, values) """ + return self.config[section], self.values[section] if section in self.values else {} + + def addConfigSection(self, section, name, desc, long_desc, config): + """Adds a section to the config. 
`config` is a list of config tuples as used in plugin api defined as: + The order of the config elements is preserved with OrderedDict + """ + d = OrderedDict() + + for entry in config: + if len(entry) == 5: + conf_name, type, conf_desc, conf_verbose, default = entry + else: # config options without description + conf_name, type, conf_desc, default = entry + conf_verbose = "" + + d[conf_name] = ConfigData(gettext(conf_desc), type, gettext(conf_verbose), from_string(default, type)) + + data = SectionTuple(gettext(name), gettext(desc), gettext(long_desc), d) + self.config[section] = data + +class Section: + """provides dictionary like access for configparser""" + + def __init__(self, parser, section): + """Constructor""" + self.parser = parser + self.section = section + + def __getitem__(self, item): + """getitem""" + return self.parser.get(self.section, item) + + def __setitem__(self, item, value): + """setitem""" + self.parser.set(self.section, item, value) diff --git a/pyload/config/__init__.py b/pyload/config/__init__.py new file mode 100644 index 000000000..4b31e848b --- /dev/null +++ b/pyload/config/__init__.py @@ -0,0 +1 @@ +__author__ = 'christian' diff --git a/pyload/config/default.py b/pyload/config/default.py new file mode 100644 index 000000000..8a2044281 --- /dev/null +++ b/pyload/config/default.py @@ -0,0 +1,107 @@ +# -*- coding: utf-8 -*- + +""" +Configuration layout for default base config +""" + +#TODO: write tooltips and descriptions +#TODO: use apis config related classes + +def make_config(config): + # Check if gettext is installed + _ = lambda x: x + + config.addConfigSection("remote", _("Remote"), _("Description"), _("Long description"), + [ + ("activated", "bool", _("Activated"), _("Tooltip"), True), + ("port", "int", _("Port"), _("Tooltip"), 7227), + ("listenaddr", "ip", _("Adress"), _("Tooltip"), "0.0.0.0"), + ]) + + config.addConfigSection("log", _("Log"), _("Description"), _("Long description"), + [ + ("log_size", "int", _("Size in kb"), _("Tooltip"), 100), + ("log_folder", "folder", _("Folder"), _("Tooltip"), "Logs"), + ("file_log", "bool", _("File Log"), _("Tooltip"), True), + ("log_count", "int", _("Count"), _("Tooltip"), 5), + ("log_rotate", "bool", _("Log Rotate"), _("Tooltip"), True), + ]) + + config.addConfigSection("permission", _("Permissions"), _("Description"), _("Long description"), + [ + ("group", "str", _("Groupname"), _("Tooltip"), "users"), + ("change_dl", "bool", _("Change Group and User of Downloads"), _("Tooltip"), False), + ("change_file", "bool", _("Change file mode of downloads"), _("Tooltip"), False), + ("user", "str", _("Username"), _("Tooltip"), "user"), + ("file", "str", _("Filemode for Downloads"), _("Tooltip"), "0644"), + ("change_group", "bool", _("Change group of running process"), _("Tooltip"), False), + ("folder", "str", _("Folder Permission mode"), _("Tooltip"), "0755"), + ("change_user", "bool", _("Change user of running process"), _("Tooltip"), False), + ]) + + config.addConfigSection("general", _("General"), _("Description"), _("Long description"), + [ + ("language", "en;de;fr;it;es;nl;sv;ru;pl;cs;sr;pt_BR", _("Language"), _("Tooltip"), "en"), + ("download_folder", "folder", _("Download Folder"), _("Tooltip"), "Downloads"), + ("checksum", "bool", _("Use Checksum"), _("Tooltip"), False), + ("folder_per_package", "bool", _("Create folder for each package"), _("Tooltip"), True), + ("debug_mode", "bool", _("Debug Mode"), _("Tooltip"), False), + ("min_free_space", "int", _("Min Free Space (MB)"), _("Tooltip"), 200), + ("renice", 
"int", _("CPU Priority"), _("Tooltip"), 0), + ]) + + config.addConfigSection("ssl", _("SSL"), _("Description"), _("Long description"), + [ + ("cert", "file", _("SSL Certificate"), _("Tooltip"), "ssl.crt"), + ("activated", "bool", _("Activated"), _("Tooltip"), False), + ("key", "file", _("SSL Key"), _("Tooltip"), "ssl.key"), + ]) + + config.addConfigSection("webinterface", _("Webinterface"), _("Description"), _("Long description"), + [ + ("template", "str", _("Template"), _("Tooltip"), "default"), + ("activated", "bool", _("Activated"), _("Tooltip"), True), + ("prefix", "str", _("Path Prefix"), _("Tooltip"), ""), + ("server", "auto;threaded;fallback;fastcgi", _("Server"), _("Tooltip"), "auto"), + ("force_server", "str", _("Favor specific server"), _("Tooltip"), ""), + ("host", "ip", _("IP"), _("Tooltip"), "0.0.0.0"), + ("https", "bool", _("Use HTTPS"), _("Tooltip"), False), + ("port", "int", _("Port"), _("Tooltip"), 8001), + ("develop", "str", _("Development mode"), _(""), False), + ]) + + config.addConfigSection("proxy", _("Proxy"), _("Description"), _("Long description"), + [ + ("username", "str", _("Username"), _("Tooltip"), ""), + ("proxy", "bool", _("Use Proxy"), _("Tooltip"), False), + ("address", "str", _("Address"), _("Tooltip"), "localhost"), + ("password", "password", _("Password"), _("Tooltip"), ""), + ("type", "http;socks4;socks5", _("Protocol"), _("Tooltip"), "http"), + ("port", "int", _("Port"), _("Tooltip"), 7070), + ]) + + config.addConfigSection("reconnect", _("Reconnect"), _("Description"), _("Long description"), + [ + ("endTime", "time", _("End"), _("Tooltip"), "0:00"), + ("activated", "bool", _("Use Reconnect"), _("Tooltip"), False), + ("method", "str", _("Method"), _("Tooltip"), "./reconnect.sh"), + ("startTime", "time", _("Start"), _("Tooltip"), "0:00"), + ]) + + config.addConfigSection("download", _("Download"), _("Description"), _("Long description"), + [ + ("max_downloads", "int", _("Max Parallel Downloads"), _("Tooltip"), 3), + ("limit_speed", "bool", _("Limit Download Speed"), _("Tooltip"), False), + ("interface", "str", _("Download interface to bind (ip or Name)"), _("Tooltip"), ""), + ("skip_existing", "bool", _("Skip already existing files"), _("Tooltip"), False), + ("max_speed", "int", _("Max Download Speed in kb/s"), _("Tooltip"), -1), + ("ipv6", "bool", _("Allow IPv6"), _("Tooltip"), False), + ("chunks", "int", _("Max connections for one download"), _("Tooltip"), 3), + ("restart_failed", "bool", _("Restart failed downloads on startup"), _("Tooltip"), False), + ]) + + config.addConfigSection("downloadTime", _("Download Time"), _("Description"), _("Long description"), + [ + ("start", "time", _("Start"), _("Tooltip"), "0:00"), + ("end", "time", _("End"), _("Tooltip"), "0:00"), + ]) diff --git a/pyload/database/AccountDatabase.py b/pyload/database/AccountDatabase.py new file mode 100644 index 000000000..eaa1a3203 --- /dev/null +++ b/pyload/database/AccountDatabase.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- + +from pyload.database import queue, async +from pyload.database import DatabaseBackend + + +class AccountMethods: + @queue + def loadAccounts(db): + db.c.execute('SELECT plugin, loginname, activated, password, options FROM accounts;') + return db.c.fetchall() + + @async + def saveAccounts(db, data): + # TODO: owner, shared + + db.c.executemany( + 'INSERT INTO accounts(plugin, loginname, activated, password, options) VALUES(?,?,?,?,?)', data) + + @async + def removeAccount(db, plugin, loginname): + db.c.execute('DELETE FROM accounts WHERE plugin=? 
AND loginname=?', (plugin, loginname)) + + +DatabaseBackend.registerSub(AccountMethods) \ No newline at end of file diff --git a/pyload/database/ConfigDatabase.py b/pyload/database/ConfigDatabase.py new file mode 100644 index 000000000..0c0dd72dd --- /dev/null +++ b/pyload/database/ConfigDatabase.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pyload.database import DatabaseMethods, queue, async + +class ConfigMethods(DatabaseMethods): + + @async + def saveConfig(self, plugin, config, user=None): + if user is None: user = -1 + self.c.execute('INSERT INTO settings(plugin, config, user) VALUES(?,?,?)', (plugin, config, user)) + + + @queue + def loadConfig(self, plugin, user=None): + if user is None: user = -1 + self.c.execute('SELECT config FROM settings WHERE plugin=? AND user=?', (plugin, user)) + + r = self.c.fetchone() + return r[0] if r else "" + + @async + def deleteConfig(self, plugin, user=None): + if user is None: + self.c.execute('DELETE FROM settings WHERE plugin=?', (plugin, )) + else: + self.c.execute('DELETE FROM settings WHERE plugin=? AND user=?', (plugin, user)) + + @queue + def loadAllConfigs(self): + self.c.execute('SELECT user, plugin, config FROM settings') + configs = {} + for r in self.c: + if r[0] in configs: + configs[r[0]][r[1]] = r[2] + else: + configs[r[0]] = {r[1]: r[2]} + + return configs + + @queue + def loadConfigsForUser(self, user=None): + if user is None: user = -1 + self.c.execute('SELECT plugin, config FROM settings WHERE user=?', (user,)) + configs = {} + for r in self.c: + configs[r[0]] = r[1] + + return configs + + @async + def clearAllConfigs(self): + self.c.execute('DELETE FROM settings') + + +ConfigMethods.register() \ No newline at end of file diff --git a/pyload/database/DatabaseBackend.py b/pyload/database/DatabaseBackend.py new file mode 100644 index 000000000..99a406d9c --- /dev/null +++ b/pyload/database/DatabaseBackend.py @@ -0,0 +1,492 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +############################################################################### +# Copyright(c) 2008-2012 pyLoad Team +# http://www.pyload.org +# +# This file is part of pyLoad. +# pyLoad is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. 
+# +# Subjected to the terms and conditions in LICENSE +# +# @author: RaNaN, mkaay +############################################################################### + +from threading import Thread, Event +from shutil import move + +from Queue import Queue +from traceback import print_exc + +from pyload.utils.fs import chmod, exists, remove + +try: + from pysqlite2 import dbapi2 as sqlite3 +except: + import sqlite3 + +DB = None +DB_VERSION = 6 + +def set_DB(db): + global DB + DB = db + + +def queue(f): + @staticmethod + def x(*args, **kwargs): + if DB: + return DB.queue(f, *args, **kwargs) + + return x + + +def async(f): + @staticmethod + def x(*args, **kwargs): + if DB: + return DB.async(f, *args, **kwargs) + + return x + + +def inner(f): + @staticmethod + def x(*args, **kwargs): + if DB: + return f(DB, *args, **kwargs) + + return x + + +class DatabaseMethods: + # stubs for autocompletion + core = None + manager = None + conn = None + c = None + + @classmethod + def register(cls): + DatabaseBackend.registerSub(cls) + + +class DatabaseJob(): + def __init__(self, f, *args, **kwargs): + self.done = Event() + + self.f = f + self.args = args + self.kwargs = kwargs + + self.result = None + self.exception = False + + # import inspect + # self.frame = inspect.currentframe() + + def __repr__(self): + from os.path import basename + + frame = self.frame.f_back + output = "" + for i in range(5): + output += "\t%s:%s, %s\n" % (basename(frame.f_code.co_filename), frame.f_lineno, frame.f_code.co_name) + frame = frame.f_back + del frame + del self.frame + + return "DataBase Job %s:%s\n%sResult: %s" % (self.f.__name__, self.args[1:], output, self.result) + + def processJob(self): + try: + self.result = self.f(*self.args, **self.kwargs) + except Exception, e: + print_exc() + try: + print "Database Error @", self.f.__name__, self.args[1:], self.kwargs, e + except: + pass + + self.exception = e + finally: + self.done.set() + + def wait(self): + self.done.wait() + + +class DatabaseBackend(Thread): + subs = [] + + DB_FILE = "pyload.db" + VERSION_FILE = "db.version" + + def __init__(self, core): + Thread.__init__(self) + self.setDaemon(True) + self.core = core + self.manager = None # set later + self.running = Event() + + self.jobs = Queue() + + set_DB(self) + + def setup(self): + """ *MUST* be called before db can be used !""" + self.start() + self.running.wait() + + def init(self): + """main loop, which executes commands""" + + version = self._checkVersion() + + self.conn = sqlite3.connect(self.DB_FILE) + chmod(self.DB_FILE, 0600) + + self.c = self.conn.cursor() + + if version is not None and version < DB_VERSION: + success = self._convertDB(version) + + # delete database + if not success: + self.c.close() + self.conn.close() + + try: + self.manager.core.log.warning(_("Database was deleted due to incompatible version.")) + except: + print "Database was deleted due to incompatible version." 
+ + remove(self.VERSION_FILE) + move(self.DB_FILE, self.DB_FILE + ".backup") + f = open(self.VERSION_FILE, "wb") + f.write(str(DB_VERSION)) + f.close() + + self.conn = sqlite3.connect(self.DB_FILE) + chmod(self.DB_FILE, 0600) + self.c = self.conn.cursor() + + self._createTables() + self.conn.commit() + + + def run(self): + try: + self.init() + finally: + self.running.set() + + while True: + j = self.jobs.get() + if j == "quit": + self.c.close() + self.conn.commit() + self.conn.close() + self.closing.set() + break + j.processJob() + + + def shutdown(self): + self.running.clear() + self.closing = Event() + self.jobs.put("quit") + self.closing.wait(1) + + def _checkVersion(self): + """ get db version""" + if not exists(self.VERSION_FILE): + f = open(self.VERSION_FILE, "wb") + f.write(str(DB_VERSION)) + f.close() + return + + f = open(self.VERSION_FILE, "rb") + v = int(f.read().strip()) + f.close() + + return v + + def _convertDB(self, v): + try: + return getattr(self, "_convertV%i" % v)() + except: + return False + + #--convert scripts start + + def _convertV6(self): + return False + + #--convert scripts end + + def _createTables(self): + """create tables for database""" + + self.c.execute( + 'CREATE TABLE IF NOT EXISTS "packages" (' + '"pid" INTEGER PRIMARY KEY AUTOINCREMENT, ' + '"name" TEXT NOT NULL, ' + '"folder" TEXT DEFAULT "" NOT NULL, ' + '"site" TEXT DEFAULT "" NOT NULL, ' + '"comment" TEXT DEFAULT "" NOT NULL, ' + '"password" TEXT DEFAULT "" NOT NULL, ' + '"added" INTEGER DEFAULT 0 NOT NULL,' # set by trigger + '"status" INTEGER DEFAULT 0 NOT NULL,' + '"tags" TEXT DEFAULT "" NOT NULL,' + '"shared" INTEGER DEFAULT 0 NOT NULL,' + '"packageorder" INTEGER DEFAULT -1 NOT NULL,' #incremented by trigger + '"root" INTEGER DEFAULT -1 NOT NULL, ' + '"owner" INTEGER NOT NULL, ' + 'FOREIGN KEY(owner) REFERENCES users(uid), ' + 'CHECK (root != pid)' + ')' + ) + + self.c.execute( + 'CREATE TRIGGER IF NOT EXISTS "insert_package" AFTER INSERT ON "packages"' + 'BEGIN ' + 'UPDATE packages SET added = strftime("%s", "now"), ' + 'packageorder = (SELECT max(p.packageorder) + 1 FROM packages p WHERE p.root=new.root) ' + 'WHERE rowid = new.rowid;' + 'END' + ) + + self.c.execute( + 'CREATE TRIGGER IF NOT EXISTS "delete_package" AFTER DELETE ON "packages"' + 'BEGIN ' + 'DELETE FROM files WHERE package = old.pid;' + 'UPDATE packages SET packageorder=packageorder-1 WHERE packageorder > old.packageorder AND root=old.root;' + 'END' + ) + self.c.execute('CREATE INDEX IF NOT EXISTS "package_index" ON packages(root, owner)') + self.c.execute('CREATE INDEX IF NOT EXISTS "package_owner" ON packages(owner)') + + self.c.execute( + 'CREATE TABLE IF NOT EXISTS "files" (' + '"fid" INTEGER PRIMARY KEY AUTOINCREMENT, ' + '"name" TEXT NOT NULL, ' + '"size" INTEGER DEFAULT 0 NOT NULL, ' + '"status" INTEGER DEFAULT 0 NOT NULL, ' + '"media" INTEGER DEFAULT 1 NOT NULL,' + '"added" INTEGER DEFAULT 0 NOT NULL,' + '"fileorder" INTEGER DEFAULT -1 NOT NULL, ' + '"url" TEXT DEFAULT "" NOT NULL, ' + '"plugin" TEXT DEFAULT "" NOT NULL, ' + '"hash" TEXT DEFAULT "" NOT NULL, ' + '"dlstatus" INTEGER DEFAULT 0 NOT NULL, ' + '"error" TEXT DEFAULT "" NOT NULL, ' + '"package" INTEGER NOT NULL, ' + '"owner" INTEGER NOT NULL, ' + 'FOREIGN KEY(owner) REFERENCES users(uid), ' + 'FOREIGN KEY(package) REFERENCES packages(pid)' + ')' + ) + self.c.execute('CREATE INDEX IF NOT EXISTS "file_index" ON files(package, owner)') + self.c.execute('CREATE INDEX IF NOT EXISTS "file_owner" ON files(owner)') + + self.c.execute( + 'CREATE TRIGGER IF NOT EXISTS 
"insert_file" AFTER INSERT ON "files"' + 'BEGIN ' + 'UPDATE files SET added = strftime("%s", "now"), ' + 'fileorder = (SELECT max(f.fileorder) + 1 FROM files f WHERE f.package=new.package) ' + 'WHERE rowid = new.rowid;' + 'END' + ) + + self.c.execute( + 'CREATE TABLE IF NOT EXISTS "collector" (' + '"owner" INTEGER NOT NULL, ' + '"data" TEXT NOT NULL, ' + 'FOREIGN KEY(owner) REFERENCES users(uid), ' + 'PRIMARY KEY(owner) ON CONFLICT REPLACE' + ') ' + ) + + self.c.execute( + 'CREATE TABLE IF NOT EXISTS "storage" (' + '"identifier" TEXT NOT NULL, ' + '"key" TEXT NOT NULL, ' + '"value" TEXT DEFAULT "", ' + 'PRIMARY KEY (identifier, key) ON CONFLICT REPLACE' + ')' + ) + + self.c.execute( + 'CREATE TABLE IF NOT EXISTS "users" (' + '"uid" INTEGER PRIMARY KEY AUTOINCREMENT, ' + '"name" TEXT NOT NULL UNIQUE, ' + '"email" TEXT DEFAULT "" NOT NULL, ' + '"password" TEXT NOT NULL, ' + '"role" INTEGER DEFAULT 0 NOT NULL, ' + '"permission" INTEGER DEFAULT 0 NOT NULL, ' + '"folder" TEXT DEFAULT "" NOT NULL, ' + '"traffic" INTEGER DEFAULT -1 NOT NULL, ' + '"dllimit" INTEGER DEFAULT -1 NOT NULL, ' + '"dlquota" TEXT DEFAULT "" NOT NULL, ' + '"hddquota" INTEGER DEFAULT -1 NOT NULL, ' + '"template" TEXT DEFAULT "default" NOT NULL, ' + '"user" INTEGER DEFAULT -1 NOT NULL, ' # set by trigger to self + 'FOREIGN KEY(user) REFERENCES users(uid)' + ')' + ) + self.c.execute('CREATE INDEX IF NOT EXISTS "username_index" ON users(name)') + + self.c.execute( + 'CREATE TRIGGER IF NOT EXISTS "insert_user" AFTER INSERT ON "users"' + 'BEGIN ' + 'UPDATE users SET user = new.uid, folder=new.name ' + 'WHERE rowid = new.rowid;' + 'END' + ) + + self.c.execute( + 'CREATE TABLE IF NOT EXISTS "settings" (' + '"plugin" TEXT NOT NULL, ' + '"user" INTEGER DEFAULT -1 NOT NULL, ' + '"config" TEXT NOT NULL, ' + 'FOREIGN KEY(user) REFERENCES users(uid), ' + 'PRIMARY KEY (plugin, user) ON CONFLICT REPLACE' + ')' + ) + + self.c.execute( + 'CREATE TABLE IF NOT EXISTS "accounts" (' + '"plugin" TEXT NOT NULL, ' + '"loginname" TEXT NOT NULL, ' + '"owner" INTEGER NOT NULL DEFAULT -1, ' + '"activated" INTEGER DEFAULT 1, ' + '"password" TEXT DEFAULT "", ' + '"shared" INTEGER DEFAULT 0, ' + '"options" TEXT DEFAULT "", ' + 'FOREIGN KEY(owner) REFERENCES users(uid), ' + 'PRIMARY KEY (plugin, loginname, owner) ON CONFLICT REPLACE' + ')' + ) + + self.c.execute( + 'CREATE TABLE IF NOT EXISTS "stats" (' + '"user" INTEGER NOT NULL, ' + '"plugin" TEXT NOT NULL, ' + '"time" INTEGER NOT NULL, ' + '"premium" INTEGER DEFAULT 0 NOT NULL, ' + '"amount" INTEGER DEFAULT 0 NOT NULL, ' + 'FOREIGN KEY(user) REFERENCES users(uid), ' + 'PRIMARY KEY(user, plugin, time)' + ')' + ) + self.c.execute('CREATE INDEX IF NOT EXISTS "stats_time" ON stats(time)') + + #try to lower ids + self.c.execute('SELECT max(fid) FROM files') + fid = self.c.fetchone()[0] + fid = int(fid) if fid else 0 + self.c.execute('UPDATE SQLITE_SEQUENCE SET seq=? WHERE name=?', (fid, "files")) + + self.c.execute('SELECT max(pid) FROM packages') + pid = self.c.fetchone()[0] + pid = int(pid) if pid else 0 + self.c.execute('UPDATE SQLITE_SEQUENCE SET seq=? 
WHERE name=?', (pid, "packages")) + + self.c.execute('VACUUM') + + + def createCursor(self): + return self.conn.cursor() + + @async + def commit(self): + self.conn.commit() + + @queue + def syncSave(self): + self.conn.commit() + + @async + def rollback(self): + self.conn.rollback() + + def async(self, f, *args, **kwargs): + args = (self, ) + args + job = DatabaseJob(f, *args, **kwargs) + self.jobs.put(job) + + def queue(self, f, *args, **kwargs): + args = (self, ) + args + job = DatabaseJob(f, *args, **kwargs) + self.jobs.put(job) + # only wait when db is running + if self.running.isSet(): job.wait() + return job.result + + @classmethod + def registerSub(cls, klass): + cls.subs.append(klass) + + @classmethod + def unregisterSub(cls, klass): + cls.subs.remove(klass) + + def __getattr__(self, attr): + for sub in DatabaseBackend.subs: + if hasattr(sub, attr): + return getattr(sub, attr) + raise AttributeError(attr) + +if __name__ == "__main__": + db = DatabaseBackend() + db.setup() + + class Test(): + @queue + def insert(db): + c = db.createCursor() + for i in range(1000): + c.execute("INSERT INTO storage (identifier, key, value) VALUES (?, ?, ?)", ("foo", i, "bar")) + + @async + def insert2(db): + c = db.createCursor() + for i in range(1000 * 1000): + c.execute("INSERT INTO storage (identifier, key, value) VALUES (?, ?, ?)", ("foo", i, "bar")) + + @queue + def select(db): + c = db.createCursor() + for i in range(10): + res = c.execute("SELECT value FROM storage WHERE identifier=? AND key=?", ("foo", i)) + print res.fetchone() + + @queue + def error(db): + c = db.createCursor() + print "a" + c.execute("SELECT myerror FROM storage WHERE identifier=? AND key=?", ("foo", i)) + print "e" + + db.registerSub(Test) + from time import time + + start = time() + for i in range(100): + db.insert() + end = time() + print end - start + + start = time() + db.insert2() + end = time() + print end - start + + db.error() + diff --git a/pyload/database/FileDatabase.py b/pyload/database/FileDatabase.py new file mode 100644 index 000000000..7b39cfa47 --- /dev/null +++ b/pyload/database/FileDatabase.py @@ -0,0 +1,448 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +############################################################################### +# Copyright(c) 2008-2012 pyLoad Team +# http://www.pyload.org +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. 
+# +# Subjected to the terms and conditions in LICENSE +# +# @author: RaNaN +############################################################################### + +from new_collections import OrderedDict + +from pyload.Api import DownloadInfo, FileInfo, PackageInfo, PackageStats, DownloadState as DS, state_string +from pyload.database import DatabaseMethods, queue, async, inner + +zero_stats = PackageStats(0, 0, 0, 0) + + +class FileMethods(DatabaseMethods): + + @queue + def filecount(self): + """returns number of files, currently only used for debugging""" + self.c.execute("SELECT COUNT(*) FROM files") + return self.c.fetchone()[0] + + @queue + def downloadstats(self, user=None): + """ number of downloads and size """ + if user is None: + self.c.execute("SELECT COUNT(*), SUM(f.size) FROM files f WHERE dlstatus != 0") + else: + self.c.execute( + "SELECT COUNT(*), SUM(f.size) FROM files f, packages p WHERE f.package = p.pid AND p.owner=? AND dlstatus != 0", + (user,)) + + r = self.c.fetchone() + # sum is None when no elements are added + return (r[0], r[1] if r[1] is not None else 0) if r else (0, 0) + + @queue + def queuestats(self, user=None): + """ number and size of files in queue not finished yet""" + # status not in NA, finished, skipped + if user is None: + self.c.execute("SELECT COUNT(*), SUM(f.size) FROM files f WHERE dlstatus NOT IN (0,5,6)") + else: + self.c.execute( + "SELECT COUNT(*), SUM(f.size) FROM files f, packages p WHERE f.package = p.pid AND p.owner=? AND dlstatus NOT IN (0,5,6)", + (user,)) + + r = self.c.fetchone() + return (r[0], r[1] if r[1] is not None else 0) if r else (0, 0) + + + # TODO: multi user? + @queue + def processcount(self, fid=-1, user=None): + """ number of files which have to be processed """ + # status in online, queued, starting, waiting, downloading + self.c.execute("SELECT COUNT(*), SUM(size) FROM files WHERE dlstatus IN (2,3,8,9,10) AND fid != ?", (fid, )) + return self.c.fetchone()[0] + + @queue + def processstats(self, user=None): + if user is None: + self.c.execute("SELECT COUNT(*), SUM(size) FROM files WHERE dlstatus IN (2,3,8,9,10)") + else: + self.c.execute( + "SELECT COUNT(*), SUM(f.size) FROM files f, packages p WHERE f.package = p.pid AND p.owner=? AND dlstatus IN (2,3,8,9,10)", + (user,)) + r = self.c.fetchone() + return (r[0], r[1] if r[1] is not None else 0) if r else (0, 0) + + @queue + def addLink(self, url, name, plugin, package, owner): + # mark file status initially as missing, dlstatus - queued + self.c.execute('INSERT INTO files(url, name, plugin, status, dlstatus, package, owner) VALUES(?,?,?,1,3,?,?)', + (url, name, plugin, package, owner)) + return self.c.lastrowid + + @async + def addLinks(self, links, package, owner): + """ links is a list of tuples (url, plugin)""" + links = [(x[0], x[0], x[1], package, owner) for x in links] + self.c.executemany( + 'INSERT INTO files(url, name, plugin, status, dlstatus, package, owner) VALUES(?,?,?,1,3,?,?)', + links) + + @queue + def addFile(self, name, size, media, package, owner): + # file status - ok, dl status NA + self.c.execute('INSERT INTO files(name, size, media, package, owner) VALUES(?,?,?,?,?)', + (name, size, media, package, owner)) + return self.c.lastrowid + + @queue + def addPackage(self, name, folder, root, password, site, comment, status, owner): + self.c.execute( + 'INSERT INTO packages(name, folder, root, password, site, comment, status, owner) VALUES(?,?,?,?,?,?,?,?)' + , (name, folder, root, password, site, comment, status, owner)) + return self.c.lastrowid + + @async + def deletePackage(self, pid, 
owner=None): + # order updated by trigger, as well as links deleted + if owner is None: + self.c.execute('DELETE FROM packages WHERE pid=?', (pid,)) + else: + self.c.execute('DELETE FROM packages WHERE pid=? AND owner=?', (pid, owner)) + + @async + def deleteFile(self, fid, order, package, owner=None): + """ To delete a file, its order and package are needed """ + if owner is None: + self.c.execute('DELETE FROM files WHERE fid=?', (fid,)) + self.c.execute('UPDATE files SET fileorder=fileorder-1 WHERE fileorder > ? AND package=?', + (order, package)) + else: + self.c.execute('DELETE FROM files WHERE fid=? AND owner=?', (fid, owner)) + self.c.execute('UPDATE files SET fileorder=fileorder-1 WHERE fileorder > ? AND package=? AND owner=?', + (order, package, owner)) + + @async + def saveCollector(self, owner, data): + """ simply save the json string to database """ + self.c.execute("INSERT INTO collector(owner, data) VALUES (?,?)", (owner, data)) + + @queue + def retrieveCollector(self, owner): + """ retrieve the saved string """ + self.c.execute('SELECT data FROM collector WHERE owner=?', (owner,)) + r = self.c.fetchone() + if not r: return None + return r[0] + + @async + def deleteCollector(self, owner): + """ drop saved user collector """ + self.c.execute('DELETE FROM collector WHERE owner=?', (owner,)) + + @queue + def getAllFiles(self, package=None, search=None, state=None, owner=None): + """ Return dict with file information + + :param package: optional package id to filter by + :param search: optional search string for file names + :param state: filter by download state, DS.All means no filter + :param owner: only files of a specific owner + """ + qry = ('SELECT fid, name, owner, size, status, media, added, fileorder, ' + 'url, plugin, hash, dlstatus, error, package FROM files WHERE ') + + arg = [] + + if state is not None and state != DS.All: + qry += 'dlstatus IN (%s) AND ' % state_string(state) + if owner is not None: + qry += 'owner=? AND ' + arg.append(owner) + + if package is not None: + arg.append(package) + qry += 'package=? AND ' + if search is not None: + search = "%%%s%%" % search.strip("%") + arg.append(search) + qry += "name LIKE ? " + + # make qry valid + if qry.endswith("WHERE "): qry = qry[:-6] + if qry.endswith("AND "): qry = qry[:-4] + + self.c.execute(qry + "ORDER BY package, fileorder", arg) + + data = OrderedDict() + for r in self.c: + f = FileInfo(r[0], r[1], r[13], r[2], r[3], r[4], r[5], r[6], r[7]) + if r[11] > 0: # dl status != NA + f.download = DownloadInfo(r[8], r[9], r[10], r[11], self.manager.statusMsg[r[11]], r[12]) + + data[r[0]] = f + + return data + + @queue + def getMatchingFilenames(self, pattern, owner=None): + """ Return matching file names for pattern, useful for search suggestions """ + qry = 'SELECT name FROM files WHERE name LIKE ?' + args = ["%%%s%%" % pattern.strip("%")] + if owner: + qry += " AND owner=?" 
+ args.append(owner) + + self.c.execute(qry, args) + return [r[0] for r in self.c] + + @queue + def getAllPackages(self, root=None, owner=None, tags=None): + """ Return dict with package information + + :param root: optional root to filter + :param owner: optional user id + :param tags: optional tag list + """ + qry = ( + 'SELECT pid, name, folder, root, owner, site, comment, password, added, tags, status, shared, packageorder ' + 'FROM packages%s ORDER BY root, packageorder') + + if root is None: + stats = self.getPackageStats(owner=owner) + if owner is None: + self.c.execute(qry % "") + else: + self.c.execute(qry % " WHERE owner=?", (owner,)) + else: + stats = self.getPackageStats(root=root, owner=owner) + if owner is None: + self.c.execute(qry % ' WHERE root=? OR pid=?', (root, root)) + else: + self.c.execute(qry % ' WHERE (root=? OR pid=?) AND owner=?', (root, root, owner)) + + data = OrderedDict() + for r in self.c: + data[r[0]] = PackageInfo( + r[0], r[1], r[2], r[3], r[4], r[5], r[6], r[7], r[8], r[9].split(","), r[10], r[11], r[12], + stats.get(r[0], zero_stats) + ) + + return data + + @inner + def getPackageStats(self, pid=None, root=None, owner=None): + qry = ("SELECT p.pid, SUM(f.size) AS sizetotal, COUNT(f.fid) AS linkstotal, sizedone, linksdone " + "FROM packages p JOIN files f ON p.pid = f.package AND f.dlstatus > 0 %(sub)s LEFT OUTER JOIN " + "(SELECT p.pid AS pid, SUM(f.size) AS sizedone, COUNT(f.fid) AS linksdone " + "FROM packages p JOIN files f ON p.pid = f.package %(sub)s AND f.dlstatus in (5,6) GROUP BY p.pid) s ON s.pid = p.pid " + "GROUP BY p.pid") + + # status in (finished, skipped, processing) + + if root is not None: + self.c.execute(qry % {"sub": "AND (p.root=:root OR p.pid=:root)"}, locals()) + elif pid is not None: + self.c.execute(qry % {"sub": "AND p.pid=:pid"}, locals()) + elif owner is not None: + self.c.execute(qry % {"sub": "AND p.owner=:owner"}, locals()) + else: + self.c.execute(qry % {"sub": ""}) + + data = {} + for r in self.c: + data[r[0]] = PackageStats( + r[2] if r[2] else 0, + r[4] if r[4] else 0, + int(r[1]) if r[1] else 0, + int(r[3]) if r[3] else 0, + ) + + return data + + @queue + def getStatsForPackage(self, pid): + return self.getPackageStats(pid=pid)[pid] + + @queue + def getFileInfo(self, fid, force=False): + """get data for specific file, when force is true download info will be appended""" + self.c.execute('SELECT fid, name, owner, size, status, media, added, fileorder, ' + 'url, plugin, hash, dlstatus, error, package FROM files ' + 'WHERE fid=?', (fid,)) + r = self.c.fetchone() + if not r: + return None + else: + f = FileInfo(r[0], r[1], r[13], r[2], r[3], r[4], r[5], r[6], r[7]) + if r[11] > 0 or force: + f.download = DownloadInfo(r[8], r[9], r[10], r[11], self.manager.statusMsg[r[11]], r[12]) + + return f + + @queue + def getPackageInfo(self, pid, stats=True): + """get data for a specific package, optionally with package stats""" + if stats: + stats = self.getPackageStats(pid=pid) + + self.c.execute( + 'SELECT pid, name, folder, root, owner, site, comment, password, added, tags, status, shared, packageorder ' + 'FROM packages WHERE pid=?', (pid,)) + + r = self.c.fetchone() + if not r: + return None + else: + return PackageInfo( + r[0], r[1], r[2], r[3], r[4], r[5], r[6], r[7], r[8], r[9].split(","), r[10], r[11], r[12], + stats.get(r[0], zero_stats) if stats else None + ) + + # TODO: does this need owner? 
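+    # Usage sketch for updateLinkInfo below (values are made up for illustration):
+    # each tuple is (name, size, status, url), or (name, size, status, hash, url)
+    # when a hash is known; all tuples in one call must have the same length,
+    # since a single statement is chosen from len(data[0]) and run via executemany.
+    #
+    # db.updateLinkInfo([
+    #     ("video.part1.rar", 734003200, 2, "http://example.com/video.part1.rar"),
+    #     ("video.part2.rar", 734003200, 2, "http://example.com/video.part2.rar"),
+    # ])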
+ @async + def updateLinkInfo(self, data): + """ data is list of tuples (name, size, status,[ hash,] url)""" + # status in (NA, Offline, Online, Queued, TempOffline) + if data and len(data[0]) == 4: + self.c.executemany('UPDATE files SET name=?, size=?, dlstatus=? WHERE url=? AND dlstatus IN (0,1,2,3,11)', + data) + else: + self.c.executemany( + 'UPDATE files SET name=?, size=?, dlstatus=?, hash=? WHERE url=? AND dlstatus IN (0,1,2,3,11)', data) + + @async + def updateFile(self, f): + self.c.execute('UPDATE files SET name=?, size=?, status=?,' + 'media=?, url=?, hash=?, dlstatus=?, error=? WHERE fid=?', + (f.name, f.size, f.filestatus, f.media, f.url, + f.hash, f.status, f.error, f.fid)) + + @async + def updatePackage(self, p): + self.c.execute( + 'UPDATE packages SET name=?, folder=?, site=?, comment=?, password=?, tags=?, status=?, shared=? WHERE pid=?', + (p.name, p.folder, p.site, p.comment, p.password, ",".join(p.tags), p.status, p.shared, p.pid)) + + # TODO: most modifying methods needs owner argument to avoid checking beforehand + @async + def orderPackage(self, pid, root, oldorder, order): + if oldorder > order: # package moved upwards + self.c.execute( + 'UPDATE packages SET packageorder=packageorder+1 WHERE packageorder >= ? AND packageorder < ? AND root=? AND packageorder >= 0' + , (order, oldorder, root)) + elif oldorder < order: # moved downwards + self.c.execute( + 'UPDATE packages SET packageorder=packageorder-1 WHERE packageorder <= ? AND packageorder > ? AND root=? AND packageorder >= 0' + , (order, oldorder, root)) + + self.c.execute('UPDATE packages SET packageorder=? WHERE pid=?', (order, pid)) + + @async + def orderFiles(self, pid, fids, oldorder, order): + diff = len(fids) + data = [] + + if oldorder > order: # moved upwards + self.c.execute('UPDATE files SET fileorder=fileorder+? WHERE fileorder >= ? AND fileorder < ? AND package=?' + , (diff, order, oldorder, pid)) + data = [(order + i, fid) for i, fid in enumerate(fids)] + elif oldorder < order: + self.c.execute( + 'UPDATE files SET fileorder=fileorder-? WHERE fileorder <= ? AND fileorder >= ? AND package=?' + , (diff, order, oldorder + diff, pid)) + data = [(order - diff + i + 1, fid) for i, fid in enumerate(fids)] + + self.c.executemany('UPDATE files SET fileorder=? WHERE fid=?', data) + + @async + def moveFiles(self, pid, fids, package): + self.c.execute('SELECT max(fileorder) FROM files WHERE package=?', (package,)) + r = self.c.fetchone() + order = (r[0] if r[0] else 0) + 1 + + self.c.execute('UPDATE files SET fileorder=fileorder-? WHERE fileorder > ? AND package=?', + (len(fids), order, pid)) + + data = [(package, order + i, fid) for i, fid in enumerate(fids)] + self.c.executemany('UPDATE files SET package=?, fileorder=? WHERE fid=?', data) + + @async + def movePackage(self, root, order, pid, dpid): + self.c.execute('SELECT max(packageorder) FROM packages WHERE root=?', (dpid,)) + r = self.c.fetchone() + max = (r[0] if r[0] else 0) + 1 + + self.c.execute('UPDATE packages SET packageorder=packageorder-1 WHERE packageorder > ? AND root=?', + (order, root)) + + self.c.execute('UPDATE packages SET root=?, packageorder=? 
WHERE pid=?', (dpid, max, pid)) + + @async + def restartFile(self, fid): + # status -> queued + self.c.execute('UPDATE files SET dlstatus=3, error="" WHERE fid=?', (fid,)) + + @async + def restartPackage(self, pid): + # status -> queued + self.c.execute('UPDATE files SET dlstatus=3, error="" WHERE package=?', (pid,)) + + + # TODO: multi user approach + @queue + def getJob(self, occ): + """return pyfile ids, which are suitable for download and don't use an occupied plugin""" + cmd = "(%s)" % ", ".join(["'%s'" % x for x in occ]) + #TODO + + # dlstatus in online, queued | package status = ok + cmd = ("SELECT f.fid FROM files as f INNER JOIN packages as p ON f.package=p.pid " + "WHERE f.plugin NOT IN %s AND f.dlstatus IN (2,3) AND p.status=0 " + "ORDER BY p.packageorder ASC, f.fileorder ASC LIMIT 5") % cmd + + self.c.execute(cmd) + + return [x[0] for x in self.c] + + @queue + def getUnfinished(self, pid): + """return a list of at most 3 pyfile ids in the package that are not finished or processed""" + + # status in finished, skipped, processing + self.c.execute("SELECT fid FROM files WHERE package=? AND dlstatus NOT IN (5, 6, 14) LIMIT 3", (pid,)) + return [r[0] for r in self.c] + + @queue + def restartFailed(self, owner): + # status=queued, where status in failed, aborted, temp offline + self.c.execute("UPDATE files SET dlstatus=3, error='' WHERE dlstatus IN (7, 11, 12)") + + @queue + def findDuplicates(self, id, folder, filename): + """ checks if filename exists with different id and same package, dlstatus = finished """ + # TODO: also check root of package + self.c.execute( + "SELECT f.plugin FROM files f INNER JOIN packages as p ON f.package=p.pid AND p.folder=? WHERE f.fid!=? AND f.dlstatus=5 AND f.name=?" + , (folder, id, filename)) + return self.c.fetchone() + + @queue + def purgeLinks(self): + # fstatus = missing + self.c.execute("DELETE FROM files WHERE status == 1") + + @queue + def purgeAll(self): # only used for debugging + self.c.execute("DELETE FROM packages") + self.c.execute("DELETE FROM files") + self.c.execute("DELETE FROM collector") + + +FileMethods.register() \ No newline at end of file diff --git a/pyload/database/StatisticDatabase.py b/pyload/database/StatisticDatabase.py new file mode 100644 index 000000000..d5f9658f2 --- /dev/null +++ b/pyload/database/StatisticDatabase.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pyload.database import DatabaseMethods, queue, async, inner + +# TODO + +class StatisticMethods(DatabaseMethods): + pass + + + +StatisticMethods.register() \ No newline at end of file diff --git a/pyload/database/StorageDatabase.py b/pyload/database/StorageDatabase.py new file mode 100644 index 000000000..2d4c8a9c7 --- /dev/null +++ b/pyload/database/StorageDatabase.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +""" + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3 of the License, + or (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, see <http://www.gnu.org/licenses/>. 
+ + @author: mkaay +""" + +from pyload.database import DatabaseBackend, queue + +class StorageMethods(): + @queue + def setStorage(db, identifier, key, value): + db.c.execute("SELECT value FROM storage WHERE identifier=? AND key=?", (identifier, key)) + if db.c.fetchone() is not None: + db.c.execute("UPDATE storage SET value=? WHERE identifier=? AND key=?", (value, identifier, key)) + else: + db.c.execute("INSERT INTO storage (identifier, key, value) VALUES (?, ?, ?)", (identifier, key, value)) + + @queue + def getStorage(db, identifier, key=None): + if key is not None: + db.c.execute("SELECT value FROM storage WHERE identifier=? AND key=?", (identifier, key)) + row = db.c.fetchone() + if row is not None: + return row[0] + else: + db.c.execute("SELECT key, value FROM storage WHERE identifier=?", (identifier, )) + d = {} + for row in db.c: + d[row[0]] = row[1] + return d + + @queue + def delStorage(db, identifier, key): + db.c.execute("DELETE FROM storage WHERE identifier=? AND key=?", (identifier, key)) + +DatabaseBackend.registerSub(StorageMethods) diff --git a/pyload/database/UserDatabase.py b/pyload/database/UserDatabase.py new file mode 100644 index 000000000..3dd06c912 --- /dev/null +++ b/pyload/database/UserDatabase.py @@ -0,0 +1,125 @@ +# -*- coding: utf-8 -*- + +############################################################################### +# Copyright(c) 2008-2012 pyLoad Team +# http://www.pyload.org +# +# This file is part of pyLoad. +# pyLoad is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# Subjected to the terms and conditions in LICENSE +# +# @author: RaNaN +############################################################################### + +from hashlib import sha1 +from string import letters, digits +from random import choice + +alphnum = letters+digits + +from pyload.Api import UserData + +from DatabaseBackend import DatabaseMethods, queue, async + +def random_salt(): + return "".join(choice(alphnum) for x in range(0,5)) + +class UserMethods(DatabaseMethods): + + @queue + def addUser(self, user, password): + salt = random_salt() + h = sha1(salt + password) + password = salt + h.hexdigest() + + self.c.execute('SELECT name FROM users WHERE name=?', (user, )) + if self.c.fetchone() is not None: + self.c.execute('UPDATE users SET password=? 
WHERE name=?', (password, user)) + else: + self.c.execute('INSERT INTO users (name, password) VALUES (?, ?)', (user, password)) + + @queue + def getUserData(self, name=None, uid=None): + qry = ('SELECT uid, name, email, role, permission, folder, traffic, dllimit, dlquota, ' + 'hddquota, user, template FROM "users" WHERE ') + + if name is not None: + self.c.execute(qry + "name=?", (name,)) + r = self.c.fetchone() + if r: + return UserData(*r) + + elif uid is not None: + self.c.execute(qry + "uid=?", (uid,)) + r = self.c.fetchone() + if r: + return UserData(*r) + + return None + + @queue + def getAllUserData(self): + self.c.execute('SELECT uid, name, email, role, permission, folder, traffic, dllimit, dlquota, ' + 'hddquota, user, template FROM "users"') + user = {} + for r in self.c: + user[r[0]] = UserData(*r) + + return user + + + @queue + def checkAuth(self, user, password): + self.c.execute('SELECT uid, name, email, role, permission, folder, traffic, dllimit, dlquota, ' + 'hddquota, user, template, password FROM "users" WHERE name=?', (user, )) + r = self.c.fetchone() + if not r: + return None + salt = r[-1][:5] + pw = r[-1][5:] + h = sha1(salt + password) + if h.hexdigest() == pw: + return UserData(*r[:-1]) + else: + return None + + @queue #TODO + def changePassword(self, user, oldpw, newpw): + self.c.execute('SELECT rowid, name, password FROM users WHERE name=?', (user, )) + r = self.c.fetchone() + if not r: + return False + + salt = r[2][:5] + pw = r[2][5:] + h = sha1(salt + oldpw) + if h.hexdigest() == pw: + salt = random_salt() + h = sha1(salt + newpw) + password = salt + h.hexdigest() + + self.c.execute("UPDATE users SET password=? WHERE name=?", (password, user)) + return True + + return False + + @async + def setPermission(self, user, perms): + self.c.execute("UPDATE users SET permission=? WHERE name=?", (perms, user)) + + @async + def setRole(self, user, role): + self.c.execute("UPDATE users SET role=? WHERE name=?", (role, user)) + + # TODO update methods + + @async + def removeUser(self, uid=None): + # deletes user and all associated accounts + self.c.execute('DELETE FROM users WHERE user=?', (uid, )) + +UserMethods.register() diff --git a/pyload/database/__init__.py b/pyload/database/__init__.py new file mode 100644 index 000000000..d3f97fb53 --- /dev/null +++ b/pyload/database/__init__.py @@ -0,0 +1,8 @@ +from DatabaseBackend import DatabaseMethods, DatabaseBackend, queue, async, inner + +from FileDatabase import FileMethods +from UserDatabase import UserMethods +from StorageDatabase import StorageMethods +from AccountDatabase import AccountMethods +from ConfigDatabase import ConfigMethods +from StatisticDatabase import StatisticMethods \ No newline at end of file diff --git a/pyload/datatypes/PyFile.py b/pyload/datatypes/PyFile.py new file mode 100644 index 000000000..ba5208795 --- /dev/null +++ b/pyload/datatypes/PyFile.py @@ -0,0 +1,268 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +############################################################################### +# Copyright(c) 2008-2012 pyLoad Team +# http://www.pyload.org +# +# This file is part of pyLoad. +# pyLoad is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. 
+# +# Subjected to the terms and conditions in LICENSE +# +# @author: RaNaN +############################################################################### + +from time import sleep, time +from ReadWriteLock import ReadWriteLock + +from pyload.Api import ProgressInfo, DownloadProgress, FileInfo, DownloadInfo, DownloadStatus +from pyload.utils import lock, read_lock + +statusMap = { + "none": 0, + "offline": 1, + "online": 2, + "queued": 3, + "paused": 4, + "finished": 5, + "skipped": 6, + "failed": 7, + "starting": 8, + "waiting": 9, + "downloading": 10, + "temp. offline": 11, + "aborted": 12, + "decrypting": 13, + "processing": 14, + "custom": 15, + "unknown": 16, +} + + +class PyFile(object): + """ + Represents a file object at runtime + """ + __slots__ = ("m", "fid", "_name", "_size", "filestatus", "media", "added", "fileorder", + "url", "pluginname", "hash", "status", "error", "packageid", "ownerid", + "lock", "plugin", "waitUntil", "abort", "statusname", + "reconnected", "pluginclass") + + @staticmethod + def fromInfoData(m, info): + f = PyFile(m, info.fid, info.name, info.size, info.status, info.media, info.added, info.fileorder, + "", "", "", DownloadStatus.NA, "", info.package, info.owner) + if info.download: + f.url = info.download.url + f.pluginname = info.download.plugin + f.hash = info.download.hash + f.status = info.download.status + f.error = info.download.error + + return f + + def __init__(self, manager, fid, name, size, filestatus, media, added, fileorder, + url, pluginname, hash, status, error, package, owner): + + self.m = manager + + self.fid = int(fid) + self._name = name + self._size = size + self.filestatus = filestatus + self.media = media + self.added = added + self.fileorder = fileorder + self.url = url + self.pluginname = pluginname + self.hash = hash + self.status = status + self.error = error + self.ownerid = owner + self.packageid = package + # database information ends here + + self.lock = ReadWriteLock() + + self.plugin = None + + self.waitUntil = 0 # time() + time to wait + + # status attributes + self.abort = False + self.reconnected = False + self.statusname = None + + + @property + def id(self): + self.m.core.log.debug("Deprecated attr .id, use .fid instead") + return self.fid + + def setSize(self, value): + self._size = int(value) + + # will convert all sizes to ints + size = property(lambda self: self._size, setSize) + + def getName(self): + try: + if self.plugin.req.name: + return self.plugin.req.name + else: + return self._name + except: + return self._name + + def setName(self, name): + """ Only set unicode or utf8 strings as name """ + if type(name) == str: + name = name.decode("utf8") + + self._name = name + + name = property(getName, setName) + + def __repr__(self): + return "<PyFile %s: %s@%s>" % (self.id, self.name, self.pluginname) + + @lock + def initPlugin(self): + """ inits plugin instance """ + if not self.plugin: + self.pluginclass = self.m.core.pluginManager.getPlugin(self.pluginname) + self.plugin = self.pluginclass(self) + + @read_lock + def hasPlugin(self): + """Thread-safe way to determine whether this file has an initialized plugin attribute""" + return hasattr(self, "plugin") and self.plugin + + def package(self): + """ return package instance""" + return self.m.getPackage(self.packageid) + + def setStatus(self, status): + self.status = statusMap[status] + # needs to sync so status is written to database + self.sync() + + def setCustomStatus(self, msg, status="processing"): + self.statusname = msg + self.setStatus(status) + + def getStatusName(self): + if 
self.status not in (13, 14) or not self.statusname: + return self.m.statusMsg[self.status] + else: + return self.statusname + + def hasStatus(self, status): + return statusMap[status] == self.status + + def sync(self): + """sync PyFile instance with database""" + self.m.updateFile(self) + + @lock + def release(self): + """sync and remove from cache""" + if hasattr(self, "plugin") and self.plugin: + self.plugin.clean() + del self.plugin + + self.m.releaseFile(self.fid) + + + def toInfoData(self): + return FileInfo(self.fid, self.getName(), self.packageid, self.ownerid, self.getSize(), self.filestatus, + self.media, self.added, self.fileorder, DownloadInfo( + self.url, self.pluginname, self.hash, self.status, self.getStatusName(), self.error + ) + ) + + def getPath(self): + pass + + def move(self, pid): + pass + + @read_lock + def abortDownload(self): + """abort pyfile if possible""" + # TODO: abort timeout, currently dead locks + while self.id in self.m.core.threadManager.processingIds(): + self.abort = True + if self.plugin and self.plugin.req: + self.plugin.req.abortDownloads() + sleep(0.1) + + self.abort = False + if self.plugin and self.plugin.req: + self.plugin.req.abortDownloads() + + self.release() + + def finishIfDone(self): + """set status to finished and release file if every thread is finished with it""" + + if self.id in self.m.core.threadManager.processingIds(): + return False + + self.setStatus("finished") + self.release() + self.m.checkAllLinksFinished() + return True + + def checkIfProcessed(self): + self.m.checkAllLinksProcessed(self.id) + + def getSpeed(self): + """ calculates speed """ + try: + return self.plugin.req.speed + except: + return 0 + + def getETA(self): + """ gets estimated time of arrival, or waiting time""" + try: + if self.status == DownloadStatus.Waiting: + return self.waitUntil - time() + + return self.getBytesLeft() / self.getSpeed() + except: + return 0 + + def getBytesArrived(self): + """ gets bytes arrived """ + try: + return self.plugin.req.arrived + except: + return 0 + + def getBytesLeft(self): + """ gets bytes left """ + try: + return self.plugin.req.size - self.plugin.req.arrived + except: + return 0 + + def getSize(self): + """ get size of download """ + try: + if self.plugin.req.size: + return self.plugin.req.size + else: + return self.size + except: + return self.size + + def getProgressInfo(self): + return ProgressInfo(self.pluginname, self.name, self.getStatusName(), self.getETA(), + self.getBytesArrived(), self.getSize(), + DownloadProgress(self.fid, self.packageid, self.getSpeed(), self.status)) diff --git a/pyload/datatypes/PyPackage.py b/pyload/datatypes/PyPackage.py new file mode 100644 index 000000000..d23ae88e7 --- /dev/null +++ b/pyload/datatypes/PyPackage.py @@ -0,0 +1,115 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +############################################################################### +# Copyright(c) 2008-2012 pyLoad Team +# http://www.pyload.org +# +# This file is part of pyLoad. +# pyLoad is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. 
+# +# Subjected to the terms and conditions in LICENSE +# +# @author: RaNaN +############################################################################### + +from time import time + +from pyload.Api import PackageInfo, PackageStatus +from pyload.utils.fs import join + +class PyPackage: + """ + Represents a package object at runtime + """ + + @staticmethod + def fromInfoData(m, info): + return PyPackage(m, info.pid, info.name, info.folder, info.root, info.owner, + info.site, info.comment, info.password, info.added, info.tags, info.status, info.shared, info.packageorder) + + def __init__(self, manager, pid, name, folder, root, owner, site, comment, password, added, tags, status, + shared, packageorder): + self.m = manager + + self.pid = pid + self.name = name + self.folder = folder + self.root = root + self.ownerid = owner + self.site = site + self.comment = comment + self.password = password + self.added = added + self.tags = tags + self.status = status + self.shared = shared + self.packageorder = packageorder + self.timestamp = time() + + #: Finish event already fired + self.setFinished = False + + @property + def id(self): + self.m.core.log.debug("Deprecated package attr .id, use .pid instead") + return self.pid + + def isStale(self): + return self.timestamp + 30 * 60 > time() + + def toInfoData(self): + return PackageInfo(self.pid, self.name, self.folder, self.root, self.ownerid, self.site, + self.comment, self.password, self.added, self.tags, self.status, self.shared, self.packageorder + ) + + def getChildren(self): + """get information about contained links""" + return self.m.getPackageData(self.pid)["links"] + + def getPath(self, name=""): + self.timestamp = time() + return join(self.m.getPackage(self.root).getPath(), self.folder, name) + + def sync(self): + """sync with db""" + self.m.updatePackage(self) + + def release(self): + """sync and delete from cache""" + self.sync() + self.m.releasePackage(self.id) + + def delete(self): + self.m.deletePackage(self.id) + + def deleteIfEmpty(self): + """ True if deleted """ + if not len(self.getChildren()): + self.delete() + return True + return False + + def notifyChange(self): + self.m.core.eventManager.dispatchEvent("packageUpdated", self.id) + + +class RootPackage(PyPackage): + def __init__(self, m, owner): + PyPackage.__init__(self, m, -1, "root", "", owner, -2, "", "", "", 0, [], PackageStatus.Ok, False, 0) + + def getPath(self, name=""): + return join(self.m.core.config["general"]["download_folder"], name) + + # no database operations + def sync(self): + pass + + def delete(self): + pass + + def release(self): + pass \ No newline at end of file diff --git a/pyload/datatypes/User.py b/pyload/datatypes/User.py new file mode 100644 index 000000000..31c9a55cc --- /dev/null +++ b/pyload/datatypes/User.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +############################################################################### +# Copyright(c) 2008-2012 pyLoad Team +# http://www.pyload.org +# +# This file is part of pyLoad. +# pyLoad is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. 
+# +# Subjected to the terms and conditions in LICENSE +# +# @author: RaNaN +############################################################################### + + +from pyload.Api import UserData, Permission, Role +from pyload.utils import bits_set + +#TODO: activate user +#noinspection PyUnresolvedReferences +class User(UserData): + + @staticmethod + def fromUserData(api, user): + return User(api, user.uid, user.name, user.email, user.role, user.permission, user.folder, + user.traffic, user.dllimit, user.dlquota, user.hddquota, user.user, user.templateName) + + def __init__(self, api, *args, **kwargs): + UserData.__init__(self, *args, **kwargs) + self.api = api + + + def toUserData(self): + # TODO + return UserData() + + def hasPermission(self, perms): + """ Accepts permission bit or name """ + if isinstance(perms, basestring) and hasattr(Permission, perms): + perms = getattr(Permission, perms) + + return bits_set(perms, self.permission) + + def hasRole(self, role): + if isinstance(role, basestring) and hasattr(Role, role): + role = getattr(Role, role) + + return self.role == role + + def isAdmin(self): + return self.hasRole(Role.Admin) + + @property + def primary(self): + """ Primary user id, Internal user handle used for most operations + Secondary user account share id with primary user. Only Admins have no primary id. """ + if self.hasRole(Role.Admin): + return None + return self.user if self.user else self.uid \ No newline at end of file diff --git a/pyload/datatypes/__init__.py b/pyload/datatypes/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pyload/interaction/EventManager.py b/pyload/interaction/EventManager.py new file mode 100644 index 000000000..7d37ca6b9 --- /dev/null +++ b/pyload/interaction/EventManager.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- + +from threading import Lock +from traceback import print_exc + +class EventManager: + """ + Handles all event-related tasks, also stores an event queue for clients, so they can retrieve them later. + + **Known Events:** + Most addon methods exist as events. These are some additional known events. + + ===================== ================ =========================================================== + Name Arguments Description + ===================== ================ =========================================================== + event eventName, *args Called for every event, with eventName and original args + download:preparing fid A download was just queued and will be prepared now. + download:start fid A plugin will immediately start the download afterwards. + download:allProcessed All links were handled, pyLoad would idle afterwards. + download:allFinished All downloads in the queue are finished. + config:changed sec, opt, value The config was changed. + ===================== ================ =========================================================== + + | Notes: + | download:allProcessed is *always* called before download:allFinished. 
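+    Example (sketch; ``core`` denotes the core instance that owns this manager,
+    and the callback signature matches the table above)::
+
+        def on_config_change(section, option, value):
+            print "config changed:", section, option, value
+
+        core.eventManager.addEvent("config:changed", on_config_change)
+        core.eventManager.dispatchEvent("config:changed", "general", "debug_mode", True)
+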
+ """ + + def __init__(self, core): + self.core = core + self.log = core.log + + # uuid : list of events + self.clients = {} + self.events = {"event": []} + + self.lock = Lock() + + def getEvents(self, uuid): + """ Get accumulated events for uuid since last call, this also registers a new client """ + if uuid not in self.clients: + self.clients[uuid] = Client() + return self.clients[uuid].get() + + def addEvent(self, event, func): + """Adds an event listener for event name""" + if event in self.events: + if func in self.events[event]: + self.log.debug("Function already registered %s" % func) + else: + self.events[event].append(func) + else: + self.events[event] = [func] + + def removeEvent(self, event, func): + """removes previously added event listener""" + if event in self.events: + self.events[event].remove(func) + + def removeFromEvents(self, func): + """ Removes func from all known events """ + for name, events in self.events.iteritems(): + if func in events: + events.remove(func) + + def dispatchEvent(self, event, *args): + """dispatches event with args""" + for f in self.events["event"]: + try: + f(event, *args) + except Exception, e: + self.log.warning("Error calling event handler %s: %s, %s, %s" + % ("event", f, args, str(e))) + if self.core.debug: + print_exc() + + if event in self.events: + for f in self.events[event]: + try: + f(*args) + except Exception, e: + self.log.warning("Error calling event handler %s: %s, %s, %s" + % (event, f, args, str(e))) + if self.core.debug: + print_exc() \ No newline at end of file diff --git a/pyload/interaction/InteractionManager.py b/pyload/interaction/InteractionManager.py new file mode 100644 index 000000000..9c5449b31 --- /dev/null +++ b/pyload/interaction/InteractionManager.py @@ -0,0 +1,166 @@ +# -*- coding: utf-8 -*- +""" + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3 of the License, + or (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, see . + + @author: RaNaN +""" +from threading import Lock +from time import time +from base64 import standard_b64encode + +from new_collections import OrderedDict + +from pyload.utils import lock, bits_set +from pyload.Api import Interaction as IA +from pyload.Api import InputType, Input + +from InteractionTask import InteractionTask + +class InteractionManager: + """ + Class that gives ability to interact with the user. + Arbitrary tasks with predefined output and input types can be set off. 
+ """ + + # number of seconds a client is classified as active + CLIENT_THRESHOLD = 60 + NOTIFICATION_TIMEOUT = 60 * 60 * 30 + MAX_NOTIFICATIONS = 50 + + def __init__(self, core): + self.lock = Lock() + self.core = core + self.tasks = OrderedDict() #task store, for all outgoing tasks + + self.last_clients = {} + self.ids = 0 #uniue interaction ids + + def isClientConnected(self, user): + return self.last_clients.get(user, 0) + self.CLIENT_THRESHOLD > time() + + @lock + def work(self): + # old notifications will be removed + for n in [k for k, v in self.tasks.iteritems() if v.timedOut()]: + del self.tasks[n] + + # keep notifications count limited + n = [k for k,v in self.tasks.iteritems() if v.type == IA.Notification] + n.reverse() + for v in n[:self.MAX_NOTIFICATIONS]: + del self.tasks[v] + + @lock + def createNotification(self, title, content, desc="", plugin="", owner=None): + """ Creates and queues a new Notification + + :param title: short title + :param content: text content + :param desc: short form of the notification + :param plugin: plugin name + :return: :class:`InteractionTask` + """ + task = InteractionTask(self.ids, IA.Notification, Input(InputType.Text, content), "", title, desc, plugin, + owner=owner) + self.ids += 1 + self.queueTask(task) + return task + + @lock + def createQueryTask(self, input, desc, default="", plugin="", owner=None): + # input type was given, create a input widget + if type(input) == int: + input = Input(input) + if not isinstance(input, Input): + raise TypeError("'Input' class expected not '%s'" % type(input)) + + task = InteractionTask(self.ids, IA.Query, input, default, _("Query"), desc, plugin, owner=owner) + self.ids += 1 + self.queueTask(task) + return task + + @lock + def createCaptchaTask(self, img, format, filename, plugin="", type=InputType.Text, owner=None): + """ Createss a new captcha task. + + :param img: image content (not base encoded) + :param format: img format + :param type: :class:`InputType` + :return: + """ + if type == 'textual': + type = InputType.Text + elif type == 'positional': + type = InputType.Click + + input = Input(type, [standard_b64encode(img), format, filename]) + + #todo: title desc plugin + task = InteractionTask(self.ids, IA.Captcha, input, + None, _("Captcha request"), _("Please solve the captcha."), plugin, owner=owner) + + self.ids += 1 + self.queueTask(task) + return task + + @lock + def removeTask(self, task): + if task.iid in self.tasks: + del self.tasks[task.iid] + self.core.evm.dispatchEvent("interaction:deleted", task.iid) + + @lock + def getTaskByID(self, iid): + return self.tasks.get(iid, None) + + @lock + def getTasks(self, user, mode=IA.All): + # update last active clients + self.last_clients[user] = time() + + # filter current mode + tasks = [t for t in self.tasks.itervalues() if mode == IA.All or bits_set(t.type, mode)] + # filter correct user / or shared + tasks = [t for t in tasks if user is None or user == t.owner or t.shared] + + return tasks + + def isTaskWaiting(self, user, mode=IA.All): + tasks = [t for t in self.getTasks(user, mode) if not t.type == IA.Notification or not t.seen] + return len(tasks) > 0 + + def queueTask(self, task): + cli = self.isClientConnected(task.owner) + + # set waiting times based on threshold + if cli: + task.setWaiting(self.CLIENT_THRESHOLD) + else: # TODO: higher threshold after client connects? 
+ task.setWaiting(self.CLIENT_THRESHOLD / 3) + + if task.type == IA.Notification: + task.setWaiting(self.NOTIFICATION_TIMEOUT) # notifications are valid for 30h + + for plugin in self.core.addonManager.activePlugins(): + try: + plugin.newInteractionTask(task) + except: + self.core.print_exc() + + self.tasks[task.iid] = task + self.core.evm.dispatchEvent("interaction:added", task) + + +if __name__ == "__main__": + it = InteractionTask() \ No newline at end of file diff --git a/pyload/interaction/InteractionTask.py b/pyload/interaction/InteractionTask.py new file mode 100644 index 000000000..b404aa6ce --- /dev/null +++ b/pyload/interaction/InteractionTask.py @@ -0,0 +1,100 @@ +# -*- coding: utf-8 -*- +""" + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3 of the License, + or (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, see <http://www.gnu.org/licenses/>. + + @author: RaNaN +""" + +from time import time + +from pyload.Api import InteractionTask as BaseInteractionTask +from pyload.Api import Interaction, InputType, Input + +#noinspection PyUnresolvedReferences +class InteractionTask(BaseInteractionTask): + """ + General interaction task, extends the ITask defined by the API with additional fields and methods. + """ + #: Plugins can put needed data here + storage = None + #: Timestamp when task expires + wait_until = 0 + #: The received result + result = None + #: List of registered handlers + handler = None + #: Error Message + error = None + #: Timeout locked + locked = False + #: A task that was retrieved counts as seen + seen = False + #: A task that is relevant to every user + shared = False + #: primary uid of the owner + owner = None + + def __init__(self, *args, **kwargs): + if 'owner' in kwargs: + self.owner = kwargs['owner'] + del kwargs['owner'] + if 'shared' in kwargs: + self.shared = kwargs['shared'] + del kwargs['shared'] + + BaseInteractionTask.__init__(self, *args, **kwargs) + + # additional internal attributes + self.storage = {} + self.handler = [] + self.wait_until = 0 + + def convertResult(self, value): + #TODO: convert based on input/output + return value + + def getResult(self): + return self.result + + def setShared(self): + """ enable shared mode, should not be reversed""" + self.shared = True + + def setResult(self, value): + self.result = self.convertResult(value) + + def setWaiting(self, sec, lock=False): + """ sets waiting in seconds from now, < 0 can be used as infinite """ + if not self.locked: + if sec < 0: + self.wait_until = -1 + else: + self.wait_until = max(time() + sec, self.wait_until) + + if lock: self.locked = True + + def isWaiting(self): + if self.result or self.error or self.timedOut(): + return False + + return True + + def timedOut(self): + return time() > self.wait_until > -1 + + def correct(self): + [x.taskCorrect(self) for x in self.handler] + + def invalid(self): + [x.taskInvalid(self) for x in self.handler] \ No newline at end of file diff --git a/pyload/interaction/__init__.py b/pyload/interaction/__init__.py new file mode 100644 index 000000000..de6d13128 --- /dev/null +++ b/pyload/interaction/__init__.py @@ 
-0,0 +1,2 @@ +__author__ = 'christian' + \ No newline at end of file diff --git a/pyload/lib/Getch.py b/pyload/lib/Getch.py new file mode 100644 index 000000000..22b7ea7f8 --- /dev/null +++ b/pyload/lib/Getch.py @@ -0,0 +1,76 @@ +class Getch: + """ + Gets a single character from standard input. Does not echo to + the screen. + """ + + def __init__(self): + try: + self.impl = _GetchWindows() + except ImportError: + try: + self.impl = _GetchMacCarbon() + except(AttributeError, ImportError): + self.impl = _GetchUnix() + + def __call__(self): return self.impl() + + +class _GetchUnix: + def __init__(self): + import tty + import sys + + def __call__(self): + import sys + import tty + import termios + + fd = sys.stdin.fileno() + old_settings = termios.tcgetattr(fd) + try: + tty.setraw(sys.stdin.fileno()) + ch = sys.stdin.read(1) + finally: + termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) + return ch + + +class _GetchWindows: + def __init__(self): + import msvcrt + + def __call__(self): + import msvcrt + + return msvcrt.getch() + +class _GetchMacCarbon: + """ + A function which returns the current ASCII key that is down; + if no ASCII key is down, the null string is returned. The + page http://www.mactech.com/macintosh-c/chap02-1.html was + very helpful in figuring out how to do this. + """ + + def __init__(self): + import Carbon + Carbon.Evt #see if it has this (in Unix, it doesn't) + + def __call__(self): + import Carbon + + if Carbon.Evt.EventAvail(0x0008)[0] == 0: # 0x0008 is the keyDownMask + return '' + else: + # + # The event contains the following info: + # (what,msg,when,where,mod)=Carbon.Evt.GetNextEvent(0x0008)[1] + # + # The message (msg) contains the ASCII char which is + # extracted with the 0x000000FF charCodeMask; this + # number is converted to an ASCII character with chr() and + # returned + # + (what, msg, when, where, mod) = Carbon.Evt.GetNextEvent(0x0008)[1] + return chr(msg) \ No newline at end of file diff --git a/pyload/lib/ReadWriteLock.py b/pyload/lib/ReadWriteLock.py new file mode 100644 index 000000000..cc82f3d48 --- /dev/null +++ b/pyload/lib/ReadWriteLock.py @@ -0,0 +1,232 @@ +# -*- coding: iso-8859-15 -*- +"""locks.py - Read-Write lock thread lock implementation + +See the class documentation for more info. + +Copyright (C) 2007, Heiko Wundram. +Released under the BSD-license. + +http://code.activestate.com/recipes/502283-read-write-lock-class-rlock-like/ +""" + +# Imports +# ------- + +from threading import Condition, Lock, currentThread +from time import time + + +# Read write lock +# --------------- + +class ReadWriteLock(object): + """Read-Write lock class. A read-write lock differs from a standard + threading.RLock() by allowing multiple threads to simultaneously hold a + read lock, while allowing only a single thread to hold a write lock at the + same point of time. + + When a read lock is requested while a write lock is held, the reader + is blocked; when a write lock is requested while another write lock is + held or there are read locks, the writer is blocked. + + Writers are always preferred by this implementation: if there are blocked + threads waiting for a write lock, current readers may request more read + locks (which they eventually should free, as they starve the waiting + writers otherwise), but a new thread requesting a read lock will not + be granted one, and block. This might mean starvation for readers if + two writer threads interweave their calls to acquireWrite() without + leaving a window only for readers. 
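+
+    A minimal usage sketch (an editorial illustration; only methods defined
+    below are used):
+
+        lock = ReadWriteLock()
+        lock.acquireRead()
+        try:
+            pass    # several threads may hold read locks concurrently here
+        finally:
+            lock.release()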
+
+    In case a current reader requests a write lock, this can and will be
+    satisfied without giving up the read locks first, but, only one thread
+    may perform this kind of lock upgrade, as a deadlock would otherwise
+    occur. After the write lock has been granted, the thread will hold a
+    full write lock, and not be downgraded after the upgrading call to
+    acquireWrite() has been matched by a corresponding release().
+    """
+
+    def __init__(self):
+        """Initialize this read-write lock."""
+
+        # Condition variable, used to signal waiters of a change in object
+        # state.
+        self.__condition = Condition(Lock())
+
+        # Initialize with no writers.
+        self.__writer = None
+        self.__upgradewritercount = 0
+        self.__pendingwriters = []
+
+        # Initialize with no readers.
+        self.__readers = {}
+
+    def acquire(self, blocking=True, timeout=None, shared=False):
+        if shared:
+            self.acquireRead(timeout)
+        else:
+            self.acquireWrite(timeout)
+
+    def acquireRead(self, timeout=None):
+        """Acquire a read lock for the current thread, waiting at most
+        timeout seconds or doing a non-blocking check in case timeout is <= 0.
+
+        In case timeout is None, the call to acquireRead blocks until the
+        lock request can be serviced.
+
+        In case the timeout expires before the lock could be serviced, a
+        RuntimeError is thrown."""
+
+        if timeout is not None:
+            endtime = time() + timeout
+        me = currentThread()
+        self.__condition.acquire()
+        try:
+            if self.__writer is me:
+                # If we are the writer, grant a new read lock, always.
+                self.__writercount += 1
+                return
+            while True:
+                if self.__writer is None:
+                    # Only test anything if there is no current writer.
+                    if self.__upgradewritercount or self.__pendingwriters:
+                        if me in self.__readers:
+                            # Only grant a read lock if we already have one
+                            # in case writers are waiting for their turn.
+                            # This means that writers can't easily get starved
+                            # (but see below, readers can).
+                            self.__readers[me] += 1
+                            return
+                        # No, we aren't a reader (yet), wait for our turn.
+                    else:
+                        # Grant a new read lock, always, in case there are
+                        # no pending writers (and no writer).
+                        self.__readers[me] = self.__readers.get(me, 0) + 1
+                        return
+                if timeout is not None:
+                    remaining = endtime - time()
+                    if remaining <= 0:
+                        # Timeout has expired, signal caller of this.
+                        raise RuntimeError("Acquiring read lock timed out")
+                    self.__condition.wait(remaining)
+                else:
+                    self.__condition.wait()
+        finally:
+            self.__condition.release()
+
+    def acquireWrite(self, timeout=None):
+        """Acquire a write lock for the current thread, waiting at most
+        timeout seconds or doing a non-blocking check in case timeout is <= 0.
+
+        In case the write lock cannot be serviced due to the deadlock
+        condition mentioned above, a ValueError is raised.
+
+        In case timeout is None, the call to acquireWrite blocks until the
+        lock request can be serviced.
+
+        In case the timeout expires before the lock could be serviced, a
+        RuntimeError is thrown."""
+
+        if timeout is not None:
+            endtime = time() + timeout
+        me, upgradewriter = currentThread(), False
+        self.__condition.acquire()
+        try:
+            if self.__writer is me:
+                # If we are the writer, grant a new write lock, always.
+                self.__writercount += 1
+                return
+            elif me in self.__readers:
+                # If we are a reader, no need to add us to pendingwriters,
+                # we get the upgradewriter slot.
+                if self.__upgradewritercount:
+                    # If we are a reader and want to upgrade, and someone
+                    # else also wants to upgrade, there is no way we can do
+                    # this except if one of us releases all his read locks.
+ # Signal this to user. + raise ValueError( + "Inevitable dead lock, denying write lock" + ) + upgradewriter = True + self.__upgradewritercount = self.__readers.pop(me) + else: + # We aren't a reader, so add us to the pending writers queue + # for synchronization with the readers. + self.__pendingwriters.append(me) + while True: + if not self.__readers and self.__writer is None: + # Only test anything if there are no readers and writers. + if self.__upgradewritercount: + if upgradewriter: + # There is a writer to upgrade, and it's us. Take + # the write lock. + self.__writer = me + self.__writercount = self.__upgradewritercount + 1 + self.__upgradewritercount = 0 + return + # There is a writer to upgrade, but it's not us. + # Always leave the upgrade writer the advance slot, + # because he presumes he'll get a write lock directly + # from a previously held read lock. + elif self.__pendingwriters[0] is me: + # If there are no readers and writers, it's always + # fine for us to take the writer slot, removing us + # from the pending writers queue. + # This might mean starvation for readers, though. + self.__writer = me + self.__writercount = 1 + self.__pendingwriters = self.__pendingwriters[1:] + return + if timeout is not None: + remaining = endtime - time() + if remaining <= 0: + # Timeout has expired, signal caller of this. + if upgradewriter: + # Put us back on the reader queue. No need to + # signal anyone of this change, because no other + # writer could've taken our spot before we got + # here (because of remaining readers), as the test + # for proper conditions is at the start of the + # loop, not at the end. + self.__readers[me] = self.__upgradewritercount + self.__upgradewritercount = 0 + else: + # We were a simple pending writer, just remove us + # from the FIFO list. + self.__pendingwriters.remove(me) + raise RuntimeError("Acquiring write lock timed out") + self.__condition.wait(remaining) + else: + self.__condition.wait() + finally: + self.__condition.release() + + def release(self): + """Release the currently held lock. + + In case the current thread holds no lock, a ValueError is thrown.""" + + me = currentThread() + self.__condition.acquire() + try: + if self.__writer is me: + # We are the writer, take one nesting depth away. + self.__writercount -= 1 + if not self.__writercount: + # No more write locks; take our writer position away and + # notify waiters of the new circumstances. + self.__writer = None + self.__condition.notifyAll() + elif me in self.__readers: + # We are a reader currently, take one nesting depth away. + self.__readers[me] -= 1 + if not self.__readers[me]: + # No more read locks, take our reader position away. + del self.__readers[me] + if not self.__readers: + # No more readers, notify waiters of the new + # circumstances. 
+ self.__condition.notifyAll() + else: + raise ValueError("Trying to release unheld lock") + finally: + self.__condition.release() diff --git a/pyload/lib/SafeEval.py b/pyload/lib/SafeEval.py new file mode 100644 index 000000000..8fc57f261 --- /dev/null +++ b/pyload/lib/SafeEval.py @@ -0,0 +1,47 @@ +## {{{ http://code.activestate.com/recipes/286134/ (r3) (modified) +import dis + +_const_codes = map(dis.opmap.__getitem__, [ + 'POP_TOP','ROT_TWO','ROT_THREE','ROT_FOUR','DUP_TOP', + 'BUILD_LIST','BUILD_MAP','BUILD_TUPLE', + 'LOAD_CONST','RETURN_VALUE','STORE_SUBSCR' + ]) + + +_load_names = ['False', 'True', 'null', 'true', 'false'] + +_locals = {'null': None, 'true': True, 'false': False} + +def _get_opcodes(codeobj): + i = 0 + opcodes = [] + s = codeobj.co_code + names = codeobj.co_names + while i < len(s): + code = ord(s[i]) + opcodes.append(code) + if code >= dis.HAVE_ARGUMENT: + i += 3 + else: + i += 1 + return opcodes, names + +def test_expr(expr, allowed_codes): + try: + c = compile(expr, "", "eval") + except: + raise ValueError, "%s is not a valid expression" % expr + codes, names = _get_opcodes(c) + for code in codes: + if code not in allowed_codes: + for n in names: + if n not in _load_names: + raise ValueError, "opcode %s not allowed" % dis.opname[code] + return c + + +def const_eval(expr): + c = test_expr(expr, _const_codes) + return eval(c, None, _locals) + +## end of http://code.activestate.com/recipes/286134/ }}} diff --git a/pyload/lib/__init__.py b/pyload/lib/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pyload/lib/beaker/__init__.py b/pyload/lib/beaker/__init__.py new file mode 100644 index 000000000..792d60054 --- /dev/null +++ b/pyload/lib/beaker/__init__.py @@ -0,0 +1 @@ +# diff --git a/pyload/lib/beaker/cache.py b/pyload/lib/beaker/cache.py new file mode 100644 index 000000000..4a96537ff --- /dev/null +++ b/pyload/lib/beaker/cache.py @@ -0,0 +1,459 @@ +"""Cache object + +The Cache object is used to manage a set of cache files and their +associated backend. The backends can be rotated on the fly by +specifying an alternate type when used. 
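+
+A short sketch of the front-end API (an editorial illustration; see the
+Cache class below):
+
+    c = Cache('my_namespace', type='memory')
+    c.set_value('greeting', 'hello')
+    assert c.get_value('greeting') == 'hello'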
+ +Advanced users can add new backends in beaker.backends + +""" + +import warnings + +import beaker.container as container +import beaker.util as util +from beaker.exceptions import BeakerException, InvalidCacheBackendError + +import beaker.ext.memcached as memcached +import beaker.ext.database as database +import beaker.ext.sqla as sqla +import beaker.ext.google as google + +# Initialize the basic available backends +clsmap = { + 'memory':container.MemoryNamespaceManager, + 'dbm':container.DBMNamespaceManager, + 'file':container.FileNamespaceManager, + 'ext:memcached':memcached.MemcachedNamespaceManager, + 'ext:database':database.DatabaseNamespaceManager, + 'ext:sqla': sqla.SqlaNamespaceManager, + 'ext:google': google.GoogleNamespaceManager, + } + +# Initialize the cache region dict +cache_regions = {} +cache_managers = {} + +try: + import pkg_resources + + # Load up the additional entry point defined backends + for entry_point in pkg_resources.iter_entry_points('beaker.backends'): + try: + NamespaceManager = entry_point.load() + name = entry_point.name + if name in clsmap: + raise BeakerException("NamespaceManager name conflict,'%s' " + "already loaded" % name) + clsmap[name] = NamespaceManager + except (InvalidCacheBackendError, SyntaxError): + # Ignore invalid backends + pass + except: + import sys + from pkg_resources import DistributionNotFound + # Warn when there's a problem loading a NamespaceManager + if not isinstance(sys.exc_info()[1], DistributionNotFound): + import traceback + from StringIO import StringIO + tb = StringIO() + traceback.print_exc(file=tb) + warnings.warn("Unable to load NamespaceManager entry point: '%s': " + "%s" % (entry_point, tb.getvalue()), RuntimeWarning, + 2) +except ImportError: + pass + + + + +def cache_region(region, *deco_args): + """Decorate a function to cache itself using a cache region + + The region decorator requires arguments if there are more than + 2 of the same named function, in the same module. This is + because the namespace used for the functions cache is based on + the functions name and the module. + + + Example:: + + # Add cache region settings to beaker: + beaker.cache.cache_regions.update(dict_of_config_region_options)) + + @cache_region('short_term', 'some_data') + def populate_things(search_term, limit, offset): + return load_the_data(search_term, limit, offset) + + return load('rabbits', 20, 0) + + .. note:: + + The function being decorated must only be called with + positional arguments. + + """ + cache = [None] + + def decorate(func): + namespace = util.func_namespace(func) + def cached(*args): + reg = cache_regions[region] + if not reg.get('enabled', True): + return func(*args) + + if not cache[0]: + if region not in cache_regions: + raise BeakerException('Cache region not configured: %s' % region) + cache[0] = Cache._get_cache(namespace, reg) + + cache_key = " ".join(map(str, deco_args + args)) + def go(): + return func(*args) + + return cache[0].get_value(cache_key, createfunc=go) + cached._arg_namespace = namespace + cached._arg_region = region + return cached + return decorate + + +def region_invalidate(namespace, region, *args): + """Invalidate a cache region namespace or decorated function + + This function only invalidates cache spaces created with the + cache_region decorator. + + :param namespace: Either the namespace of the result to invalidate, or the + cached function reference + + :param region: The region the function was cached to. 
If the function was
+        cached to a single region then this argument can be None
+
+    :param args: Arguments that were used to differentiate the cached
+        function as well as the arguments passed to the decorated
+        function
+
+    Example::
+
+        # Add cache region settings to beaker:
+        beaker.cache.cache_regions.update(dict_of_config_region_options)
+
+        def populate_things(invalidate=False):
+
+            @cache_region('short_term', 'some_data')
+            def load(search_term, limit, offset):
+                return load_the_data(search_term, limit, offset)
+
+            # If the results should be invalidated first
+            if invalidate:
+                region_invalidate(load, None, 'some_data',
+                                  'rabbits', 20, 0)
+            return load('rabbits', 20, 0)
+
+    """
+    if callable(namespace):
+        if not region:
+            region = namespace._arg_region
+        namespace = namespace._arg_namespace
+
+    if not region:
+        raise BeakerException("Region or callable function "
+                              "namespace is required")
+    else:
+        region = cache_regions[region]
+
+    cache = Cache._get_cache(namespace, region)
+    cache_key = " ".join(str(x) for x in args)
+    cache.remove_value(cache_key)
+
+
+class Cache(object):
+    """Front-end to the containment API implementing a data cache.
+
+    :param namespace: the namespace of this Cache
+
+    :param type: type of cache to use
+
+    :param expire: seconds to keep cached data
+
+    :param expiretime: seconds to keep cached data (legacy support)
+
+    :param starttime: time when cache was created
+
+    """
+    def __init__(self, namespace, type='memory', expiretime=None,
+                 starttime=None, expire=None, **nsargs):
+        try:
+            cls = clsmap[type]
+            if isinstance(cls, InvalidCacheBackendError):
+                raise cls
+        except KeyError:
+            raise TypeError("Unknown cache implementation %r" % type)
+
+        self.namespace = cls(namespace, **nsargs)
+        self.expiretime = expiretime or expire
+        self.starttime = starttime
+        self.nsargs = nsargs
+
+    @classmethod
+    def _get_cache(cls, namespace, kw):
+        key = namespace + str(kw)
+        try:
+            return cache_managers[key]
+        except KeyError:
+            cache_managers[key] = cache = cls(namespace, **kw)
+            return cache
+
+    def put(self, key, value, **kw):
+        self._get_value(key, **kw).set_value(value)
+    set_value = put
+
+    def get(self, key, **kw):
+        """Retrieve a cached value from the container"""
+        return self._get_value(key, **kw).get_value()
+    get_value = get
+
+    def remove_value(self, key, **kw):
+        mycontainer = self._get_value(key, **kw)
+        if mycontainer.has_current_value():
+            mycontainer.clear_value()
+    remove = remove_value
+
+    def _get_value(self, key, **kw):
+        if isinstance(key, unicode):
+            key = key.encode('ascii', 'backslashreplace')
+
+        if 'type' in kw:
+            return self._legacy_get_value(key, **kw)
+
+        kw.setdefault('expiretime', self.expiretime)
+        kw.setdefault('starttime', self.starttime)
+
+        return container.Value(key, self.namespace, **kw)
+
+    @util.deprecated("Specifying a "
+        "'type' and other namespace configuration with cache.get()/put()/etc. "
+        "is deprecated.
Specify 'type' and other namespace configuration to " + "cache_manager.get_cache() and/or the Cache constructor instead.") + def _legacy_get_value(self, key, type, **kw): + expiretime = kw.pop('expiretime', self.expiretime) + starttime = kw.pop('starttime', None) + createfunc = kw.pop('createfunc', None) + kwargs = self.nsargs.copy() + kwargs.update(kw) + c = Cache(self.namespace.namespace, type=type, **kwargs) + return c._get_value(key, expiretime=expiretime, createfunc=createfunc, + starttime=starttime) + + def clear(self): + """Clear all the values from the namespace""" + self.namespace.remove() + + # dict interface + def __getitem__(self, key): + return self.get(key) + + def __contains__(self, key): + return self._get_value(key).has_current_value() + + def has_key(self, key): + return key in self + + def __delitem__(self, key): + self.remove_value(key) + + def __setitem__(self, key, value): + self.put(key, value) + + +class CacheManager(object): + def __init__(self, **kwargs): + """Initialize a CacheManager object with a set of options + + Options should be parsed with the + :func:`~beaker.util.parse_cache_config_options` function to + ensure only valid options are used. + + """ + self.kwargs = kwargs + self.regions = kwargs.pop('cache_regions', {}) + + # Add these regions to the module global + cache_regions.update(self.regions) + + def get_cache(self, name, **kwargs): + kw = self.kwargs.copy() + kw.update(kwargs) + return Cache._get_cache(name, kw) + + def get_cache_region(self, name, region): + if region not in self.regions: + raise BeakerException('Cache region not configured: %s' % region) + kw = self.regions[region] + return Cache._get_cache(name, kw) + + def region(self, region, *args): + """Decorate a function to cache itself using a cache region + + The region decorator requires arguments if there are more than + 2 of the same named function, in the same module. This is + because the namespace used for the functions cache is based on + the functions name and the module. + + + Example:: + + # Assuming a cache object is available like: + cache = CacheManager(dict_of_config_options) + + + def populate_things(): + + @cache.region('short_term', 'some_data') + def load(search_term, limit, offset): + return load_the_data(search_term, limit, offset) + + return load('rabbits', 20, 0) + + .. note:: + + The function being decorated must only be called with + positional arguments. + + """ + return cache_region(region, *args) + + def region_invalidate(self, namespace, region, *args): + """Invalidate a cache region namespace or decorated function + + This function only invalidates cache spaces created with the + cache_region decorator. + + :param namespace: Either the namespace of the result to invalidate, or the + name of the cached function + + :param region: The region the function was cached to. 
If the function was + cached to a single region then this argument can be None + + :param args: Arguments that were used to differentiate the cached + function as well as the arguments passed to the decorated + function + + Example:: + + # Assuming a cache object is available like: + cache = CacheManager(dict_of_config_options) + + def populate_things(invalidate=False): + + @cache.region('short_term', 'some_data') + def load(search_term, limit, offset): + return load_the_data(search_term, limit, offset) + + # If the results should be invalidated first + if invalidate: + cache.region_invalidate(load, None, 'some_data', + 'rabbits', 20, 0) + return load('rabbits', 20, 0) + + + """ + return region_invalidate(namespace, region, *args) + if callable(namespace): + if not region: + region = namespace._arg_region + namespace = namespace._arg_namespace + + if not region: + raise BeakerException("Region or callable function " + "namespace is required") + else: + region = self.regions[region] + + cache = self.get_cache(namespace, **region) + cache_key = " ".join(str(x) for x in args) + cache.remove_value(cache_key) + + def cache(self, *args, **kwargs): + """Decorate a function to cache itself with supplied parameters + + :param args: Used to make the key unique for this function, as in region() + above. + + :param kwargs: Parameters to be passed to get_cache(), will override defaults + + Example:: + + # Assuming a cache object is available like: + cache = CacheManager(dict_of_config_options) + + + def populate_things(): + + @cache.cache('mycache', expire=15) + def load(search_term, limit, offset): + return load_the_data(search_term, limit, offset) + + return load('rabbits', 20, 0) + + .. note:: + + The function being decorated must only be called with + positional arguments. + + """ + cache = [None] + key = " ".join(str(x) for x in args) + + def decorate(func): + namespace = util.func_namespace(func) + def cached(*args): + if not cache[0]: + cache[0] = self.get_cache(namespace, **kwargs) + cache_key = key + " " + " ".join(str(x) for x in args) + def go(): + return func(*args) + return cache[0].get_value(cache_key, createfunc=go) + cached._arg_namespace = namespace + return cached + return decorate + + def invalidate(self, func, *args, **kwargs): + """Invalidate a cache decorated function + + This function only invalidates cache spaces created with the + cache decorator. + + :param func: Decorated function to invalidate + + :param args: Used to make the key unique for this function, as in region() + above. 
+ + :param kwargs: Parameters that were passed for use by get_cache(), note that + this is only required if a ``type`` was specified for the + function + + Example:: + + # Assuming a cache object is available like: + cache = CacheManager(dict_of_config_options) + + + def populate_things(invalidate=False): + + @cache.cache('mycache', type="file", expire=15) + def load(search_term, limit, offset): + return load_the_data(search_term, limit, offset) + + # If the results should be invalidated first + if invalidate: + cache.invalidate(load, 'mycache', 'rabbits', 20, 0, type="file") + return load('rabbits', 20, 0) + + """ + namespace = func._arg_namespace + + cache = self.get_cache(namespace, **kwargs) + cache_key = " ".join(str(x) for x in args) + cache.remove_value(cache_key) diff --git a/pyload/lib/beaker/container.py b/pyload/lib/beaker/container.py new file mode 100644 index 000000000..515e97af6 --- /dev/null +++ b/pyload/lib/beaker/container.py @@ -0,0 +1,633 @@ +"""Container and Namespace classes""" +import anydbm +import cPickle +import logging +import os +import time + +import beaker.util as util +from beaker.exceptions import CreationAbortedError, MissingCacheParameter +from beaker.synchronization import _threading, file_synchronizer, \ + mutex_synchronizer, NameLock, null_synchronizer + +__all__ = ['Value', 'Container', 'ContainerContext', + 'MemoryContainer', 'DBMContainer', 'NamespaceManager', + 'MemoryNamespaceManager', 'DBMNamespaceManager', 'FileContainer', + 'OpenResourceNamespaceManager', + 'FileNamespaceManager', 'CreationAbortedError'] + + +logger = logging.getLogger('beaker.container') +if logger.isEnabledFor(logging.DEBUG): + debug = logger.debug +else: + def debug(message, *args): + pass + + +class NamespaceManager(object): + """Handles dictionary operations and locking for a namespace of + values. + + The implementation for setting and retrieving the namespace data is + handled by subclasses. + + NamespaceManager may be used alone, or may be privately accessed by + one or more Container objects. Container objects provide per-key + services like expiration times and automatic recreation of values. + + Multiple NamespaceManagers created with a particular name will all + share access to the same underlying datasource and will attempt to + synchronize against a common mutex object. The scope of this + sharing may be within a single process or across multiple + processes, depending on the type of NamespaceManager used. + + The NamespaceManager itself is generally threadsafe, except in the + case of the DBMNamespaceManager in conjunction with the gdbm dbm + implementation. + + """ + + @classmethod + def _init_dependencies(cls): + pass + + def __init__(self, namespace): + self._init_dependencies() + self.namespace = namespace + + def get_creation_lock(self, key): + raise NotImplementedError() + + def do_remove(self): + raise NotImplementedError() + + def acquire_read_lock(self): + pass + + def release_read_lock(self): + pass + + def acquire_write_lock(self, wait=True): + return True + + def release_write_lock(self): + pass + + def has_key(self, key): + return self.__contains__(key) + + def __getitem__(self, key): + raise NotImplementedError() + + def __setitem__(self, key, value): + raise NotImplementedError() + + def set_value(self, key, value, expiretime=None): + """Optional set_value() method called by Value. + + Allows an expiretime to be passed, for namespace + implementations which can prune their collections + using expiretime. 
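+
+        The base implementation here simply ignores ``expiretime`` and
+        stores the value as-is; backends that can prune expired keys
+        natively may override it.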
+ + """ + self[key] = value + + def __contains__(self, key): + raise NotImplementedError() + + def __delitem__(self, key): + raise NotImplementedError() + + def keys(self): + raise NotImplementedError() + + def remove(self): + self.do_remove() + + +class OpenResourceNamespaceManager(NamespaceManager): + """A NamespaceManager where read/write operations require opening/ + closing of a resource which is possibly mutexed. + + """ + def __init__(self, namespace): + NamespaceManager.__init__(self, namespace) + self.access_lock = self.get_access_lock() + self.openers = 0 + self.mutex = _threading.Lock() + + def get_access_lock(self): + raise NotImplementedError() + + def do_open(self, flags): + raise NotImplementedError() + + def do_close(self): + raise NotImplementedError() + + def acquire_read_lock(self): + self.access_lock.acquire_read_lock() + try: + self.open('r', checkcount = True) + except: + self.access_lock.release_read_lock() + raise + + def release_read_lock(self): + try: + self.close(checkcount = True) + finally: + self.access_lock.release_read_lock() + + def acquire_write_lock(self, wait=True): + r = self.access_lock.acquire_write_lock(wait) + try: + if (wait or r): + self.open('c', checkcount = True) + return r + except: + self.access_lock.release_write_lock() + raise + + def release_write_lock(self): + try: + self.close(checkcount=True) + finally: + self.access_lock.release_write_lock() + + def open(self, flags, checkcount=False): + self.mutex.acquire() + try: + if checkcount: + if self.openers == 0: + self.do_open(flags) + self.openers += 1 + else: + self.do_open(flags) + self.openers = 1 + finally: + self.mutex.release() + + def close(self, checkcount=False): + self.mutex.acquire() + try: + if checkcount: + self.openers -= 1 + if self.openers == 0: + self.do_close() + else: + if self.openers > 0: + self.do_close() + self.openers = 0 + finally: + self.mutex.release() + + def remove(self): + self.access_lock.acquire_write_lock() + try: + self.close(checkcount=False) + self.do_remove() + finally: + self.access_lock.release_write_lock() + +class Value(object): + __slots__ = 'key', 'createfunc', 'expiretime', 'expire_argument', 'starttime', 'storedtime',\ + 'namespace' + + def __init__(self, key, namespace, createfunc=None, expiretime=None, starttime=None): + self.key = key + self.createfunc = createfunc + self.expire_argument = expiretime + self.starttime = starttime + self.storedtime = -1 + self.namespace = namespace + + def has_value(self): + """return true if the container has a value stored. + + This is regardless of it being expired or not. 
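+
+        Use has_current_value() when expiration should be taken into
+        account.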
+
+        """
+        self.namespace.acquire_read_lock()
+        try:
+            return self.namespace.has_key(self.key)
+        finally:
+            self.namespace.release_read_lock()
+
+    def can_have_value(self):
+        return self.has_current_value() or self.createfunc is not None
+
+    def has_current_value(self):
+        self.namespace.acquire_read_lock()
+        try:
+            has_value = self.namespace.has_key(self.key)
+            if has_value:
+                try:
+                    stored, expired, value = self._get_value()
+                    return not self._is_expired(stored, expired)
+                except KeyError:
+                    pass
+            return False
+        finally:
+            self.namespace.release_read_lock()
+
+    def _is_expired(self, storedtime, expiretime):
+        """Return true if this container's value is expired."""
+        return (
+            (
+                self.starttime is not None and
+                storedtime < self.starttime
+            )
+            or
+            (
+                expiretime is not None and
+                time.time() >= expiretime + storedtime
+            )
+        )
+
+    def get_value(self):
+        self.namespace.acquire_read_lock()
+        try:
+            has_value = self.has_value()
+            if has_value:
+                try:
+                    stored, expired, value = self._get_value()
+                    if not self._is_expired(stored, expired):
+                        return value
+                except KeyError:
+                    # guard against un-mutexed backends raising KeyError
+                    has_value = False
+
+            if not self.createfunc:
+                raise KeyError(self.key)
+        finally:
+            self.namespace.release_read_lock()
+
+        has_createlock = False
+        creation_lock = self.namespace.get_creation_lock(self.key)
+        if has_value:
+            if not creation_lock.acquire(wait=False):
+                debug("get_value returning old value while new one is created")
+                return value
+            else:
+                debug("lock_createfunc (didn't wait)")
+                has_createlock = True
+
+        if not has_createlock:
+            debug("lock_createfunc (waiting)")
+            creation_lock.acquire()
+            debug("lock_createfunc (waited)")
+
+        try:
+            # see if someone created the value already
+            self.namespace.acquire_read_lock()
+            try:
+                if self.has_value():
+                    try:
+                        stored, expired, value = self._get_value()
+                        if not self._is_expired(stored, expired):
+                            return value
+                    except KeyError:
+                        # guard against un-mutexed backends raising KeyError
+                        pass
+            finally:
+                self.namespace.release_read_lock()
+
+            debug("get_value creating new value")
+            v = self.createfunc()
+            self.set_value(v)
+            return v
+        finally:
+            creation_lock.release()
+            debug("released create lock")
+
+    def _get_value(self):
+        value = self.namespace[self.key]
+        try:
+            stored, expired, value = value
+        except ValueError:
+            if not len(value) == 2:
+                raise
+            # Old format: upgrade
+            stored, value = value
+            expired = self.expire_argument
+            debug("get_value upgrading time %r expire time %r", stored, self.expire_argument)
+            self.namespace.release_read_lock()
+            self.set_value(value, stored)
+            self.namespace.acquire_read_lock()
+        except TypeError:
+            # occurs when the value is None.
memcached + # may yank the rug from under us in which case + # that's the result + raise KeyError(self.key) + return stored, expired, value + + def set_value(self, value, storedtime=None): + self.namespace.acquire_write_lock() + try: + if storedtime is None: + storedtime = time.time() + debug("set_value stored time %r expire time %r", storedtime, self.expire_argument) + self.namespace.set_value(self.key, (storedtime, self.expire_argument, value)) + finally: + self.namespace.release_write_lock() + + def clear_value(self): + self.namespace.acquire_write_lock() + try: + debug("clear_value") + if self.namespace.has_key(self.key): + try: + del self.namespace[self.key] + except KeyError: + # guard against un-mutexed backends raising KeyError + pass + self.storedtime = -1 + finally: + self.namespace.release_write_lock() + +class AbstractDictionaryNSManager(NamespaceManager): + """A subclassable NamespaceManager that places data in a dictionary. + + Subclasses should provide a "dictionary" attribute or descriptor + which returns a dict-like object. The dictionary will store keys + that are local to the "namespace" attribute of this manager, so + ensure that the dictionary will not be used by any other namespace. + + e.g.:: + + import collections + cached_data = collections.defaultdict(dict) + + class MyDictionaryManager(AbstractDictionaryNSManager): + def __init__(self, namespace): + AbstractDictionaryNSManager.__init__(self, namespace) + self.dictionary = cached_data[self.namespace] + + The above stores data in a global dictionary called "cached_data", + which is structured as a dictionary of dictionaries, keyed + first on namespace name to a sub-dictionary, then on actual + cache key to value. + + """ + + def get_creation_lock(self, key): + return NameLock( + identifier="memorynamespace/funclock/%s/%s" % (self.namespace, key), + reentrant=True + ) + + def __getitem__(self, key): + return self.dictionary[key] + + def __contains__(self, key): + return self.dictionary.__contains__(key) + + def has_key(self, key): + return self.dictionary.__contains__(key) + + def __setitem__(self, key, value): + self.dictionary[key] = value + + def __delitem__(self, key): + del self.dictionary[key] + + def do_remove(self): + self.dictionary.clear() + + def keys(self): + return self.dictionary.keys() + +class MemoryNamespaceManager(AbstractDictionaryNSManager): + namespaces = util.SyncDict() + + def __init__(self, namespace, **kwargs): + AbstractDictionaryNSManager.__init__(self, namespace) + self.dictionary = MemoryNamespaceManager.namespaces.get(self.namespace, + dict) + +class DBMNamespaceManager(OpenResourceNamespaceManager): + def __init__(self, namespace, dbmmodule=None, data_dir=None, + dbm_dir=None, lock_dir=None, digest_filenames=True, **kwargs): + self.digest_filenames = digest_filenames + + if not dbm_dir and not data_dir: + raise MissingCacheParameter("data_dir or dbm_dir is required") + elif dbm_dir: + self.dbm_dir = dbm_dir + else: + self.dbm_dir = data_dir + "/container_dbm" + util.verify_directory(self.dbm_dir) + + if not lock_dir and not data_dir: + raise MissingCacheParameter("data_dir or lock_dir is required") + elif lock_dir: + self.lock_dir = lock_dir + else: + self.lock_dir = data_dir + "/container_dbm_lock" + util.verify_directory(self.lock_dir) + + self.dbmmodule = dbmmodule or anydbm + + self.dbm = None + OpenResourceNamespaceManager.__init__(self, namespace) + + self.file = util.encoded_path(root= self.dbm_dir, + identifiers=[self.namespace], + extension='.dbm', + 
digest_filenames=self.digest_filenames) + + debug("data file %s", self.file) + self._checkfile() + + def get_access_lock(self): + return file_synchronizer(identifier=self.namespace, + lock_dir=self.lock_dir) + + def get_creation_lock(self, key): + return file_synchronizer( + identifier = "dbmcontainer/funclock/%s" % self.namespace, + lock_dir=self.lock_dir + ) + + def file_exists(self, file): + if os.access(file, os.F_OK): + return True + else: + for ext in ('db', 'dat', 'pag', 'dir'): + if os.access(file + os.extsep + ext, os.F_OK): + return True + + return False + + def _checkfile(self): + if not self.file_exists(self.file): + g = self.dbmmodule.open(self.file, 'c') + g.close() + + def get_filenames(self): + list = [] + if os.access(self.file, os.F_OK): + list.append(self.file) + + for ext in ('pag', 'dir', 'db', 'dat'): + if os.access(self.file + os.extsep + ext, os.F_OK): + list.append(self.file + os.extsep + ext) + return list + + def do_open(self, flags): + debug("opening dbm file %s", self.file) + try: + self.dbm = self.dbmmodule.open(self.file, flags) + except: + self._checkfile() + self.dbm = self.dbmmodule.open(self.file, flags) + + def do_close(self): + if self.dbm is not None: + debug("closing dbm file %s", self.file) + self.dbm.close() + + def do_remove(self): + for f in self.get_filenames(): + os.remove(f) + + def __getitem__(self, key): + return cPickle.loads(self.dbm[key]) + + def __contains__(self, key): + return self.dbm.has_key(key) + + def __setitem__(self, key, value): + self.dbm[key] = cPickle.dumps(value) + + def __delitem__(self, key): + del self.dbm[key] + + def keys(self): + return self.dbm.keys() + + +class FileNamespaceManager(OpenResourceNamespaceManager): + def __init__(self, namespace, data_dir=None, file_dir=None, lock_dir=None, + digest_filenames=True, **kwargs): + self.digest_filenames = digest_filenames + + if not file_dir and not data_dir: + raise MissingCacheParameter("data_dir or file_dir is required") + elif file_dir: + self.file_dir = file_dir + else: + self.file_dir = data_dir + "/container_file" + util.verify_directory(self.file_dir) + + if not lock_dir and not data_dir: + raise MissingCacheParameter("data_dir or lock_dir is required") + elif lock_dir: + self.lock_dir = lock_dir + else: + self.lock_dir = data_dir + "/container_file_lock" + util.verify_directory(self.lock_dir) + OpenResourceNamespaceManager.__init__(self, namespace) + + self.file = util.encoded_path(root=self.file_dir, + identifiers=[self.namespace], + extension='.cache', + digest_filenames=self.digest_filenames) + self.hash = {} + + debug("data file %s", self.file) + + def get_access_lock(self): + return file_synchronizer(identifier=self.namespace, + lock_dir=self.lock_dir) + + def get_creation_lock(self, key): + return file_synchronizer( + identifier = "filecontainer/funclock/%s" % self.namespace, + lock_dir = self.lock_dir + ) + + def file_exists(self, file): + return os.access(file, os.F_OK) + + def do_open(self, flags): + if self.file_exists(self.file): + fh = open(self.file, 'rb') + try: + self.hash = cPickle.load(fh) + except (IOError, OSError, EOFError, cPickle.PickleError, ValueError): + pass + fh.close() + + self.flags = flags + + def do_close(self): + if self.flags == 'c' or self.flags == 'w': + fh = open(self.file, 'wb') + cPickle.dump(self.hash, fh) + fh.close() + + self.hash = {} + self.flags = None + + def do_remove(self): + try: + os.remove(self.file) + except OSError, err: + # for instance, because we haven't yet used this cache, + # but client code has asked for a 
clear() operation... + pass + self.hash = {} + + def __getitem__(self, key): + return self.hash[key] + + def __contains__(self, key): + return self.hash.has_key(key) + + def __setitem__(self, key, value): + self.hash[key] = value + + def __delitem__(self, key): + del self.hash[key] + + def keys(self): + return self.hash.keys() + + +#### legacy stuff to support the old "Container" class interface + +namespace_classes = {} + +ContainerContext = dict + +class ContainerMeta(type): + def __init__(cls, classname, bases, dict_): + namespace_classes[cls] = cls.namespace_class + return type.__init__(cls, classname, bases, dict_) + def __call__(self, key, context, namespace, createfunc=None, + expiretime=None, starttime=None, **kwargs): + if namespace in context: + ns = context[namespace] + else: + nscls = namespace_classes[self] + context[namespace] = ns = nscls(namespace, **kwargs) + return Value(key, ns, createfunc=createfunc, + expiretime=expiretime, starttime=starttime) + +class Container(object): + __metaclass__ = ContainerMeta + namespace_class = NamespaceManager + +class FileContainer(Container): + namespace_class = FileNamespaceManager + +class MemoryContainer(Container): + namespace_class = MemoryNamespaceManager + +class DBMContainer(Container): + namespace_class = DBMNamespaceManager + +DbmContainer = DBMContainer diff --git a/pyload/lib/beaker/converters.py b/pyload/lib/beaker/converters.py new file mode 100644 index 000000000..f0ad34963 --- /dev/null +++ b/pyload/lib/beaker/converters.py @@ -0,0 +1,26 @@ +# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org) +# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php +def asbool(obj): + if isinstance(obj, (str, unicode)): + obj = obj.strip().lower() + if obj in ['true', 'yes', 'on', 'y', 't', '1']: + return True + elif obj in ['false', 'no', 'off', 'n', 'f', '0']: + return False + else: + raise ValueError( + "String is not true/false: %r" % obj) + return bool(obj) + +def aslist(obj, sep=None, strip=True): + if isinstance(obj, (str, unicode)): + lst = obj.split(sep) + if strip: + lst = [v.strip() for v in lst] + return lst + elif isinstance(obj, (list, tuple)): + return obj + elif obj is None: + return [] + else: + return [obj] diff --git a/pyload/lib/beaker/crypto/__init__.py b/pyload/lib/beaker/crypto/__init__.py new file mode 100644 index 000000000..3e26b0c13 --- /dev/null +++ b/pyload/lib/beaker/crypto/__init__.py @@ -0,0 +1,40 @@ +from warnings import warn + +from beaker.crypto.pbkdf2 import PBKDF2, strxor +from beaker.crypto.util import hmac, sha1, hmac_sha1, md5 +from beaker import util + +keyLength = None + +if util.jython: + try: + from beaker.crypto.jcecrypto import getKeyLength, aesEncrypt + keyLength = getKeyLength() + except ImportError: + pass +else: + try: + from beaker.crypto.pycrypto import getKeyLength, aesEncrypt, aesDecrypt + keyLength = getKeyLength() + except ImportError: + pass + +if not keyLength: + has_aes = False +else: + has_aes = True + +if has_aes and keyLength < 32: + warn('Crypto implementation only supports key lengths up to %d bits. ' + 'Generated session cookies may be incompatible with other ' + 'environments' % (keyLength * 8)) + + +def generateCryptoKeys(master_key, salt, iterations): + # NB: We XOR parts of the keystream into the randomly-generated parts, just + # in case os.urandom() isn't as random as it should be. Note that if + # os.urandom() returns truly random data, this will have no effect on the + # overall security. 
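+    # As implemented below, the PBKDF2 keystream is read directly; no XOR
+    # with os.urandom() output actually takes place here. An illustrative
+    # call (arguments made up): generateCryptoKeys('master', 'salt', 1000)
+    # returns keyLength bytes usable as an AES key.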
+ keystream = PBKDF2(master_key, salt, iterations=iterations) + cipher_key = keystream.read(keyLength) + return cipher_key diff --git a/pyload/lib/beaker/crypto/jcecrypto.py b/pyload/lib/beaker/crypto/jcecrypto.py new file mode 100644 index 000000000..4062d513e --- /dev/null +++ b/pyload/lib/beaker/crypto/jcecrypto.py @@ -0,0 +1,30 @@ +""" +Encryption module that uses the Java Cryptography Extensions (JCE). + +Note that in default installations of the Java Runtime Environment, the +maximum key length is limited to 128 bits due to US export +restrictions. This makes the generated keys incompatible with the ones +generated by pycryptopp, which has no such restrictions. To fix this, +download the "Unlimited Strength Jurisdiction Policy Files" from Sun, +which will allow encryption using 256 bit AES keys. +""" +from javax.crypto import Cipher +from javax.crypto.spec import SecretKeySpec, IvParameterSpec + +import jarray + +# Initialization vector filled with zeros +_iv = IvParameterSpec(jarray.zeros(16, 'b')) + +def aesEncrypt(data, key): + cipher = Cipher.getInstance('AES/CTR/NoPadding') + skeySpec = SecretKeySpec(key, 'AES') + cipher.init(Cipher.ENCRYPT_MODE, skeySpec, _iv) + return cipher.doFinal(data).tostring() + +# magic. +aesDecrypt = aesEncrypt + +def getKeyLength(): + maxlen = Cipher.getMaxAllowedKeyLength('AES/CTR/NoPadding') + return min(maxlen, 256) / 8 diff --git a/pyload/lib/beaker/crypto/pbkdf2.py b/pyload/lib/beaker/crypto/pbkdf2.py new file mode 100644 index 000000000..96dc5fbb2 --- /dev/null +++ b/pyload/lib/beaker/crypto/pbkdf2.py @@ -0,0 +1,342 @@ +#!/usr/bin/python +# -*- coding: ascii -*- +########################################################################### +# PBKDF2.py - PKCS#5 v2.0 Password-Based Key Derivation +# +# Copyright (C) 2007 Dwayne C. Litzenberger +# All rights reserved. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose and without fee is hereby granted, +# provided that the above copyright notice appear in all copies and that +# both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR PROVIDES THIS SOFTWARE ``AS IS'' AND ANY EXPRESSED OR +# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# Country of origin: Canada +# +########################################################################### +# Sample PBKDF2 usage: +# from Crypto.Cipher import AES +# from PBKDF2 import PBKDF2 +# import os +# +# salt = os.urandom(8) # 64-bit salt +# key = PBKDF2("This passphrase is a secret.", salt).read(32) # 256-bit key +# iv = os.urandom(16) # 128-bit IV +# cipher = AES.new(key, AES.MODE_CBC, iv) +# ... 
+# +# Sample crypt() usage: +# from PBKDF2 import crypt +# pwhash = crypt("secret") +# alleged_pw = raw_input("Enter password: ") +# if pwhash == crypt(alleged_pw, pwhash): +# print "Password good" +# else: +# print "Invalid password" +# +########################################################################### +# History: +# +# 2007-07-27 Dwayne C. Litzenberger +# - Initial Release (v1.0) +# +# 2007-07-31 Dwayne C. Litzenberger +# - Bugfix release (v1.1) +# - SECURITY: The PyCrypto XOR cipher (used, if available, in the _strxor +# function in the previous release) silently truncates all keys to 64 +# bytes. The way it was used in the previous release, this would only be +# problem if the pseudorandom function that returned values larger than +# 64 bytes (so SHA1, SHA256 and SHA512 are fine), but I don't like +# anything that silently reduces the security margin from what is +# expected. +# +########################################################################### + +__version__ = "1.1" + +from struct import pack +from binascii import b2a_hex +from random import randint + +from base64 import b64encode + +from beaker.crypto.util import hmac as HMAC, hmac_sha1 as SHA1 + +def strxor(a, b): + return "".join([chr(ord(x) ^ ord(y)) for (x, y) in zip(a, b)]) + +class PBKDF2(object): + """PBKDF2.py : PKCS#5 v2.0 Password-Based Key Derivation + + This implementation takes a passphrase and a salt (and optionally an + iteration count, a digest module, and a MAC module) and provides a + file-like object from which an arbitrarily-sized key can be read. + + If the passphrase and/or salt are unicode objects, they are encoded as + UTF-8 before they are processed. + + The idea behind PBKDF2 is to derive a cryptographic key from a + passphrase and a salt. + + PBKDF2 may also be used as a strong salted password hash. The + 'crypt' function is provided for that purpose. + + Remember: Keys generated using PBKDF2 are only as strong as the + passphrases they are derived from. + """ + + def __init__(self, passphrase, salt, iterations=1000, + digestmodule=SHA1, macmodule=HMAC): + if not callable(macmodule): + macmodule = macmodule.new + self.__macmodule = macmodule + self.__digestmodule = digestmodule + self._setup(passphrase, salt, iterations, self._pseudorandom) + + def _pseudorandom(self, key, msg): + """Pseudorandom function. e.g. HMAC-SHA1""" + return self.__macmodule(key=key, msg=msg, + digestmod=self.__digestmodule).digest() + + def read(self, bytes): + """Read the specified number of key bytes.""" + if self.closed: + raise ValueError("file-like object is closed") + + size = len(self.__buf) + blocks = [self.__buf] + i = self.__blockNum + while size < bytes: + i += 1 + if i > 0xffffffff: + # We could return "" here, but + raise OverflowError("derived key too long") + block = self.__f(i) + blocks.append(block) + size += len(block) + buf = "".join(blocks) + retval = buf[:bytes] + self.__buf = buf[bytes:] + self.__blockNum = i + return retval + + def __f(self, i): + # i must fit within 32 bits + assert (1 <= i <= 0xffffffff) + U = self.__prf(self.__passphrase, self.__salt + pack("!L", i)) + result = U + for j in xrange(2, 1+self.__iterations): + U = self.__prf(self.__passphrase, U) + result = strxor(result, U) + return result + + def hexread(self, octets): + """Read the specified number of octets. Return them as hexadecimal. + + Note that len(obj.hexread(n)) == 2*n. 
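+
+        Illustrative use (with the defaults of 1000 iterations, HMAC-SHA1):
+
+            k = PBKDF2("password", "salt")
+            assert len(k.hexread(8)) == 16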
+ """ + return b2a_hex(self.read(octets)) + + def _setup(self, passphrase, salt, iterations, prf): + # Sanity checks: + + # passphrase and salt must be str or unicode (in the latter + # case, we convert to UTF-8) + if isinstance(passphrase, unicode): + passphrase = passphrase.encode("UTF-8") + if not isinstance(passphrase, str): + raise TypeError("passphrase must be str or unicode") + if isinstance(salt, unicode): + salt = salt.encode("UTF-8") + if not isinstance(salt, str): + raise TypeError("salt must be str or unicode") + + # iterations must be an integer >= 1 + if not isinstance(iterations, (int, long)): + raise TypeError("iterations must be an integer") + if iterations < 1: + raise ValueError("iterations must be at least 1") + + # prf must be callable + if not callable(prf): + raise TypeError("prf must be callable") + + self.__passphrase = passphrase + self.__salt = salt + self.__iterations = iterations + self.__prf = prf + self.__blockNum = 0 + self.__buf = "" + self.closed = False + + def close(self): + """Close the stream.""" + if not self.closed: + del self.__passphrase + del self.__salt + del self.__iterations + del self.__prf + del self.__blockNum + del self.__buf + self.closed = True + +def crypt(word, salt=None, iterations=None): + """PBKDF2-based unix crypt(3) replacement. + + The number of iterations specified in the salt overrides the 'iterations' + parameter. + + The effective hash length is 192 bits. + """ + + # Generate a (pseudo-)random salt if the user hasn't provided one. + if salt is None: + salt = _makesalt() + + # salt must be a string or the us-ascii subset of unicode + if isinstance(salt, unicode): + salt = salt.encode("us-ascii") + if not isinstance(salt, str): + raise TypeError("salt must be a string") + + # word must be a string or unicode (in the latter case, we convert to UTF-8) + if isinstance(word, unicode): + word = word.encode("UTF-8") + if not isinstance(word, str): + raise TypeError("word must be a string or unicode") + + # Try to extract the real salt and iteration count from the salt + if salt.startswith("$p5k2$"): + (iterations, salt, dummy) = salt.split("$")[2:5] + if iterations == "": + iterations = 400 + else: + converted = int(iterations, 16) + if iterations != "%x" % converted: # lowercase hex, minimum digits + raise ValueError("Invalid salt") + iterations = converted + if not (iterations >= 1): + raise ValueError("Invalid salt") + + # Make sure the salt matches the allowed character set + allowed = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789./" + for ch in salt: + if ch not in allowed: + raise ValueError("Illegal character %r in salt" % (ch,)) + + if iterations is None or iterations == 400: + iterations = 400 + salt = "$p5k2$$" + salt + else: + salt = "$p5k2$%x$%s" % (iterations, salt) + rawhash = PBKDF2(word, salt, iterations).read(24) + return salt + "$" + b64encode(rawhash, "./") + +# Add crypt as a static method of the PBKDF2 class +# This makes it easier to do "from PBKDF2 import PBKDF2" and still use +# crypt. +PBKDF2.crypt = staticmethod(crypt) + +def _makesalt(): + """Return a 48-bit pseudorandom salt for crypt(). + + This function is not suitable for generating cryptographic secrets. 
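+    The 48-bit value is returned as 8 base64 characters, matching the salt
+    format consumed by crypt() above.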
+ """ + binarysalt = "".join([pack("@H", randint(0, 0xffff)) for i in range(3)]) + return b64encode(binarysalt, "./") + +def test_pbkdf2(): + """Module self-test""" + from binascii import a2b_hex + + # + # Test vectors from RFC 3962 + # + + # Test 1 + result = PBKDF2("password", "ATHENA.MIT.EDUraeburn", 1).read(16) + expected = a2b_hex("cdedb5281bb2f801565a1122b2563515") + if result != expected: + raise RuntimeError("self-test failed") + + # Test 2 + result = PBKDF2("password", "ATHENA.MIT.EDUraeburn", 1200).hexread(32) + expected = ("5c08eb61fdf71e4e4ec3cf6ba1f5512b" + "a7e52ddbc5e5142f708a31e2e62b1e13") + if result != expected: + raise RuntimeError("self-test failed") + + # Test 3 + result = PBKDF2("X"*64, "pass phrase equals block size", 1200).hexread(32) + expected = ("139c30c0966bc32ba55fdbf212530ac9" + "c5ec59f1a452f5cc9ad940fea0598ed1") + if result != expected: + raise RuntimeError("self-test failed") + + # Test 4 + result = PBKDF2("X"*65, "pass phrase exceeds block size", 1200).hexread(32) + expected = ("9ccad6d468770cd51b10e6a68721be61" + "1a8b4d282601db3b36be9246915ec82a") + if result != expected: + raise RuntimeError("self-test failed") + + # + # Other test vectors + # + + # Chunked read + f = PBKDF2("kickstart", "workbench", 256) + result = f.read(17) + result += f.read(17) + result += f.read(1) + result += f.read(2) + result += f.read(3) + expected = PBKDF2("kickstart", "workbench", 256).read(40) + if result != expected: + raise RuntimeError("self-test failed") + + # + # crypt() test vectors + # + + # crypt 1 + result = crypt("cloadm", "exec") + expected = '$p5k2$$exec$r1EWMCMk7Rlv3L/RNcFXviDefYa0hlql' + if result != expected: + raise RuntimeError("self-test failed") + + # crypt 2 + result = crypt("gnu", '$p5k2$c$u9HvcT4d$.....') + expected = '$p5k2$c$u9HvcT4d$Sd1gwSVCLZYAuqZ25piRnbBEoAesaa/g' + if result != expected: + raise RuntimeError("self-test failed") + + # crypt 3 + result = crypt("dcl", "tUsch7fU", iterations=13) + expected = "$p5k2$d$tUsch7fU$nqDkaxMDOFBeJsTSfABsyn.PYUXilHwL" + if result != expected: + raise RuntimeError("self-test failed") + + # crypt 4 (unicode) + result = crypt(u'\u0399\u03c9\u03b1\u03bd\u03bd\u03b7\u03c2', + '$p5k2$$KosHgqNo$9mjN8gqjt02hDoP0c2J0ABtLIwtot8cQ') + expected = '$p5k2$$KosHgqNo$9mjN8gqjt02hDoP0c2J0ABtLIwtot8cQ' + if result != expected: + raise RuntimeError("self-test failed") + +if __name__ == '__main__': + test_pbkdf2() + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/pyload/lib/beaker/crypto/pycrypto.py b/pyload/lib/beaker/crypto/pycrypto.py new file mode 100644 index 000000000..a3eb4d9db --- /dev/null +++ b/pyload/lib/beaker/crypto/pycrypto.py @@ -0,0 +1,31 @@ +"""Encryption module that uses pycryptopp or pycrypto""" +try: + # Pycryptopp is preferred over Crypto because Crypto has had + # various periods of not being maintained, and pycryptopp uses + # the Crypto++ library which is generally considered the 'gold standard' + # of crypto implementations + from pycryptopp.cipher import aes + + def aesEncrypt(data, key): + cipher = aes.AES(key) + return cipher.process(data) + + # magic. 
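+    # (pycryptopp's AES operates in CTR mode, which is its own inverse, so
+    # the same operation both encrypts and decrypts)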
+ aesDecrypt = aesEncrypt + +except ImportError: + from Crypto.Cipher import AES + + def aesEncrypt(data, key): + cipher = AES.new(key) + + data = data + (" " * (16 - (len(data) % 16))) + return cipher.encrypt(data) + + def aesDecrypt(data, key): + cipher = AES.new(key) + + return cipher.decrypt(data).rstrip() + +def getKeyLength(): + return 32 diff --git a/pyload/lib/beaker/crypto/util.py b/pyload/lib/beaker/crypto/util.py new file mode 100644 index 000000000..d97e8ce6f --- /dev/null +++ b/pyload/lib/beaker/crypto/util.py @@ -0,0 +1,30 @@ +from warnings import warn +from beaker import util + + +try: + # Use PyCrypto (if available) + from Crypto.Hash import HMAC as hmac, SHA as hmac_sha1 + sha1 = hmac_sha1.new + +except ImportError: + + # PyCrypto not available. Use the Python standard library. + import hmac + + # When using the stdlib, we have to make sure the hmac version and sha + # version are compatible + if util.py24: + from sha import sha as sha1 + import sha as hmac_sha1 + else: + # NOTE: We have to use the callable with hashlib (hashlib.sha1), + # otherwise hmac only accepts the sha module object itself + from hashlib import sha1 + hmac_sha1 = sha1 + + +if util.py24: + from md5 import md5 +else: + from hashlib import md5 diff --git a/pyload/lib/beaker/exceptions.py b/pyload/lib/beaker/exceptions.py new file mode 100644 index 000000000..cc0eed286 --- /dev/null +++ b/pyload/lib/beaker/exceptions.py @@ -0,0 +1,24 @@ +"""Beaker exception classes""" + +class BeakerException(Exception): + pass + + +class CreationAbortedError(Exception): + """Deprecated.""" + + +class InvalidCacheBackendError(BeakerException, ImportError): + pass + + +class MissingCacheParameter(BeakerException): + pass + + +class LockError(BeakerException): + pass + + +class InvalidCryptoBackendError(BeakerException): + pass diff --git a/pyload/lib/beaker/ext/__init__.py b/pyload/lib/beaker/ext/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pyload/lib/beaker/ext/database.py b/pyload/lib/beaker/ext/database.py new file mode 100644 index 000000000..701e6f7d2 --- /dev/null +++ b/pyload/lib/beaker/ext/database.py @@ -0,0 +1,165 @@ +import cPickle +import logging +import pickle +from datetime import datetime + +from beaker.container import OpenResourceNamespaceManager, Container +from beaker.exceptions import InvalidCacheBackendError, MissingCacheParameter +from beaker.synchronization import file_synchronizer, null_synchronizer +from beaker.util import verify_directory, SyncDict + +log = logging.getLogger(__name__) + +sa = None +pool = None +types = None + +class DatabaseNamespaceManager(OpenResourceNamespaceManager): + metadatas = SyncDict() + tables = SyncDict() + + @classmethod + def _init_dependencies(cls): + global sa, pool, types + if sa is not None: + return + try: + import sqlalchemy as sa + import sqlalchemy.pool as pool + from sqlalchemy import types + except ImportError: + raise InvalidCacheBackendError("Database cache backend requires " + "the 'sqlalchemy' library") + + def __init__(self, namespace, url=None, sa_opts=None, optimistic=False, + table_name='beaker_cache', data_dir=None, lock_dir=None, + **params): + """Creates a database namespace manager + + ``url`` + SQLAlchemy compliant db url + ``sa_opts`` + A dictionary of SQLAlchemy keyword options to initialize the engine + with. + ``optimistic`` + Use optimistic session locking, note that this will result in an + additional select when updating a cache value to compare version + numbers. 
+ ``table_name`` + The table name to use in the database for the cache. + """ + OpenResourceNamespaceManager.__init__(self, namespace) + + if sa_opts is None: + sa_opts = params + + if lock_dir: + self.lock_dir = lock_dir + elif data_dir: + self.lock_dir = data_dir + "/container_db_lock" + if self.lock_dir: + verify_directory(self.lock_dir) + + # Check to see if the table's been created before + url = url or sa_opts['sa.url'] + table_key = url + table_name + def make_cache(): + # Check to see if we have a connection pool open already + meta_key = url + table_name + def make_meta(): + # SQLAlchemy pops the url, this ensures it sticks around + # later + sa_opts['sa.url'] = url + engine = sa.engine_from_config(sa_opts, 'sa.') + meta = sa.MetaData() + meta.bind = engine + return meta + meta = DatabaseNamespaceManager.metadatas.get(meta_key, make_meta) + # Create the table object and cache it now + cache = sa.Table(table_name, meta, + sa.Column('id', types.Integer, primary_key=True), + sa.Column('namespace', types.String(255), nullable=False), + sa.Column('accessed', types.DateTime, nullable=False), + sa.Column('created', types.DateTime, nullable=False), + sa.Column('data', types.PickleType, nullable=False), + sa.UniqueConstraint('namespace') + ) + cache.create(checkfirst=True) + return cache + self.hash = {} + self._is_new = False + self.loaded = False + self.cache = DatabaseNamespaceManager.tables.get(table_key, make_cache) + + def get_access_lock(self): + return null_synchronizer() + + def get_creation_lock(self, key): + return file_synchronizer( + identifier ="databasecontainer/funclock/%s" % self.namespace, + lock_dir = self.lock_dir) + + def do_open(self, flags): + # If we already loaded the data, don't bother loading it again + if self.loaded: + self.flags = flags + return + + cache = self.cache + result = sa.select([cache.c.data], + cache.c.namespace==self.namespace + ).execute().fetchone() + if not result: + self._is_new = True + self.hash = {} + else: + self._is_new = False + try: + self.hash = result['data'] + except (IOError, OSError, EOFError, cPickle.PickleError, + pickle.PickleError): + log.debug("Couln't load pickle data, creating new storage") + self.hash = {} + self._is_new = True + self.flags = flags + self.loaded = True + + def do_close(self): + if self.flags is not None and (self.flags == 'c' or self.flags == 'w'): + cache = self.cache + if self._is_new: + cache.insert().execute(namespace=self.namespace, data=self.hash, + accessed=datetime.now(), + created=datetime.now()) + self._is_new = False + else: + cache.update(cache.c.namespace==self.namespace).execute( + data=self.hash, accessed=datetime.now()) + self.flags = None + + def do_remove(self): + cache = self.cache + cache.delete(cache.c.namespace==self.namespace).execute() + self.hash = {} + + # We can retain the fact that we did a load attempt, but since the + # file is gone this will be a new namespace should it be saved. 
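For orientation, a hedged sketch of how the manager above might be instantiated. The namespace, sqlite URL, table name, and directories are illustrative; SQLAlchemy must be installed, and any 'sa.'-prefixed options are forwarded to sqlalchemy.engine_from_config():

from beaker.ext.database import DatabaseNamespaceManager

# hypothetical settings -- any SQLAlchemy-compliant URL works
manager = DatabaseNamespaceManager(
    'demo_namespace',
    url='sqlite:////tmp/beaker_cache.db',  # or sa_opts={'sa.url': ...}
    table_name='beaker_cache',
    data_dir='/tmp/beaker',  # lock files land in .../container_db_lock
)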
+ self._is_new = True + + def __getitem__(self, key): + return self.hash[key] + + def __contains__(self, key): + return self.hash.has_key(key) + + def __setitem__(self, key, value): + self.hash[key] = value + + def __delitem__(self, key): + del self.hash[key] + + def keys(self): + return self.hash.keys() + +class DatabaseContainer(Container): + namespace_manager = DatabaseNamespaceManager diff --git a/pyload/lib/beaker/ext/google.py b/pyload/lib/beaker/ext/google.py new file mode 100644 index 000000000..dd8380d7f --- /dev/null +++ b/pyload/lib/beaker/ext/google.py @@ -0,0 +1,120 @@ +import cPickle +import logging +from datetime import datetime + +from beaker.container import OpenResourceNamespaceManager, Container +from beaker.exceptions import InvalidCacheBackendError +from beaker.synchronization import null_synchronizer + +log = logging.getLogger(__name__) + +db = None + +class GoogleNamespaceManager(OpenResourceNamespaceManager): + tables = {} + + @classmethod + def _init_dependencies(cls): + global db + if db is not None: + return + try: + db = __import__('google.appengine.ext.db').appengine.ext.db + except ImportError: + raise InvalidCacheBackendError("Datastore cache backend requires the " + "'google.appengine.ext' library") + + def __init__(self, namespace, table_name='beaker_cache', **params): + """Creates a datastore namespace manager""" + OpenResourceNamespaceManager.__init__(self, namespace) + + def make_cache(): + table_dict = dict(created=db.DateTimeProperty(), + accessed=db.DateTimeProperty(), + data=db.BlobProperty()) + table = type(table_name, (db.Model,), table_dict) + return table + self.table_name = table_name + self.cache = GoogleNamespaceManager.tables.setdefault(table_name, make_cache()) + self.hash = {} + self._is_new = False + self.loaded = False + self.log_debug = logging.DEBUG >= log.getEffectiveLevel() + + # Google wants namespaces to start with letters, change the namespace + # to start with a letter + self.namespace = 'p%s' % self.namespace + + def get_access_lock(self): + return null_synchronizer() + + def get_creation_lock(self, key): + # this is weird, should probably be present + return null_synchronizer() + + def do_open(self, flags): + # If we already loaded the data, don't bother loading it again + if self.loaded: + self.flags = flags + return + + item = self.cache.get_by_key_name(self.namespace) + + if not item: + self._is_new = True + self.hash = {} + else: + self._is_new = False + try: + self.hash = cPickle.loads(str(item.data)) + except (IOError, OSError, EOFError, cPickle.PickleError): + if self.log_debug: + log.debug("Couln't load pickle data, creating new storage") + self.hash = {} + self._is_new = True + self.flags = flags + self.loaded = True + + def do_close(self): + if self.flags is not None and (self.flags == 'c' or self.flags == 'w'): + if self._is_new: + item = self.cache(key_name=self.namespace) + item.data = cPickle.dumps(self.hash) + item.created = datetime.now() + item.accessed = datetime.now() + item.put() + self._is_new = False + else: + item = self.cache.get_by_key_name(self.namespace) + item.data = cPickle.dumps(self.hash) + item.accessed = datetime.now() + item.put() + self.flags = None + + def do_remove(self): + item = self.cache.get_by_key_name(self.namespace) + item.delete() + self.hash = {} + + # We can retain the fact that we did a load attempt, but since the + # file is gone this will be a new namespace should it be saved. 
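The make_cache() helper above builds the App Engine model class at runtime with the three-argument form of type(); the same pattern in a dependency-free sketch (names here are illustrative):

# type(name, bases, dict) constructs a new class dynamically, which is
# exactly what make_cache() does for the datastore model above
Point = type('Point', (object,), {'x': 0, 'y': 0})

p = Point()
assert type(p).__name__ == 'Point' and p.x == 0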
+ self._is_new = True + + def __getitem__(self, key): + return self.hash[key] + + def __contains__(self, key): + return self.hash.has_key(key) + + def __setitem__(self, key, value): + self.hash[key] = value + + def __delitem__(self, key): + del self.hash[key] + + def keys(self): + return self.hash.keys() + + +class GoogleContainer(Container): + namespace_class = GoogleNamespaceManager diff --git a/pyload/lib/beaker/ext/memcached.py b/pyload/lib/beaker/ext/memcached.py new file mode 100644 index 000000000..96516953f --- /dev/null +++ b/pyload/lib/beaker/ext/memcached.py @@ -0,0 +1,82 @@ +from beaker.container import NamespaceManager, Container +from beaker.exceptions import InvalidCacheBackendError, MissingCacheParameter +from beaker.synchronization import file_synchronizer, null_synchronizer +from beaker.util import verify_directory, SyncDict +import warnings + +memcache = None + +class MemcachedNamespaceManager(NamespaceManager): + clients = SyncDict() + + @classmethod + def _init_dependencies(cls): + global memcache + if memcache is not None: + return + try: + import pylibmc as memcache + except ImportError: + try: + import cmemcache as memcache + warnings.warn("cmemcache is known to have serious " + "concurrency issues; consider using 'memcache' or 'pylibmc'") + except ImportError: + try: + import memcache + except ImportError: + raise InvalidCacheBackendError("Memcached cache backend requires either " + "the 'memcache' or 'cmemcache' library") + + def __init__(self, namespace, url=None, data_dir=None, lock_dir=None, **params): + NamespaceManager.__init__(self, namespace) + + if not url: + raise MissingCacheParameter("url is required") + + if lock_dir: + self.lock_dir = lock_dir + elif data_dir: + self.lock_dir = data_dir + "/container_mcd_lock" + if self.lock_dir: + verify_directory(self.lock_dir) + + self.mc = MemcachedNamespaceManager.clients.get(url, memcache.Client, url.split(';')) + + def get_creation_lock(self, key): + return file_synchronizer( + identifier="memcachedcontainer/funclock/%s" % self.namespace,lock_dir = self.lock_dir) + + def _format_key(self, key): + return self.namespace + '_' + key.replace(' ', '\302\267') + + def __getitem__(self, key): + return self.mc.get(self._format_key(key)) + + def __contains__(self, key): + value = self.mc.get(self._format_key(key)) + return value is not None + + def has_key(self, key): + return key in self + + def set_value(self, key, value, expiretime=None): + if expiretime: + self.mc.set(self._format_key(key), value, time=expiretime) + else: + self.mc.set(self._format_key(key), value) + + def __setitem__(self, key, value): + self.set_value(key, value) + + def __delitem__(self, key): + self.mc.delete(self._format_key(key)) + + def do_remove(self): + self.mc.flush_all() + + def keys(self): + raise NotImplementedError("Memcache caching does not support iteration of all cache keys") + +class MemcachedContainer(Container): + namespace_class = MemcachedNamespaceManager diff --git a/pyload/lib/beaker/ext/sqla.py b/pyload/lib/beaker/ext/sqla.py new file mode 100644 index 000000000..8c79633c1 --- /dev/null +++ b/pyload/lib/beaker/ext/sqla.py @@ -0,0 +1,133 @@ +import cPickle +import logging +import pickle +from datetime import datetime + +from beaker.container import OpenResourceNamespaceManager, Container +from beaker.exceptions import InvalidCacheBackendError, MissingCacheParameter +from beaker.synchronization import file_synchronizer, null_synchronizer +from beaker.util import verify_directory, SyncDict + + +log = logging.getLogger(__name__) 
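As _format_key() above shows, beaker emulates namespaces on memcached by prefixing every key and escaping spaces, which memcached forbids in keys; the same idea in isolation:

def format_key(namespace, key):
    # prefix with the namespace; spaces become the UTF-8 bytes of a
    # middle dot ('\302\267'), mirroring _format_key() above
    return namespace + '_' + key.replace(' ', '\302\267')

assert format_key('sessions', 'user 42') == 'sessions_user\302\26742'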
+ +sa = None + +class SqlaNamespaceManager(OpenResourceNamespaceManager): + binds = SyncDict() + tables = SyncDict() + + @classmethod + def _init_dependencies(cls): + global sa + if sa is not None: + return + try: + import sqlalchemy as sa + except ImportError: + raise InvalidCacheBackendError("SQLAlchemy, which is required by " + "this backend, is not installed") + + def __init__(self, namespace, bind, table, data_dir=None, lock_dir=None, + **kwargs): + """Create a namespace manager for use with a database table via + SQLAlchemy. + + ``bind`` + SQLAlchemy ``Engine`` or ``Connection`` object + + ``table`` + SQLAlchemy ``Table`` object in which to store namespace data. + This should usually be something created by ``make_cache_table``. + """ + OpenResourceNamespaceManager.__init__(self, namespace) + + if lock_dir: + self.lock_dir = lock_dir + elif data_dir: + self.lock_dir = data_dir + "/container_db_lock" + if self.lock_dir: + verify_directory(self.lock_dir) + + self.bind = self.__class__.binds.get(str(bind.url), lambda: bind) + self.table = self.__class__.tables.get('%s:%s' % (bind.url, table.name), + lambda: table) + self.hash = {} + self._is_new = False + self.loaded = False + + def get_access_lock(self): + return null_synchronizer() + + def get_creation_lock(self, key): + return file_synchronizer( + identifier ="databasecontainer/funclock/%s" % self.namespace, + lock_dir=self.lock_dir) + + def do_open(self, flags): + if self.loaded: + self.flags = flags + return + select = sa.select([self.table.c.data], + (self.table.c.namespace == self.namespace)) + result = self.bind.execute(select).fetchone() + if not result: + self._is_new = True + self.hash = {} + else: + self._is_new = False + try: + self.hash = result['data'] + except (IOError, OSError, EOFError, cPickle.PickleError, + pickle.PickleError): + log.debug("Couln't load pickle data, creating new storage") + self.hash = {} + self._is_new = True + self.flags = flags + self.loaded = True + + def do_close(self): + if self.flags is not None and (self.flags == 'c' or self.flags == 'w'): + if self._is_new: + insert = self.table.insert() + self.bind.execute(insert, namespace=self.namespace, data=self.hash, + accessed=datetime.now(), created=datetime.now()) + self._is_new = False + else: + update = self.table.update(self.table.c.namespace == self.namespace) + self.bind.execute(update, data=self.hash, accessed=datetime.now()) + self.flags = None + + def do_remove(self): + delete = self.table.delete(self.table.c.namespace == self.namespace) + self.bind.execute(delete) + self.hash = {} + self._is_new = True + + def __getitem__(self, key): + return self.hash[key] + + def __contains__(self, key): + return self.hash.has_key(key) + + def __setitem__(self, key, value): + self.hash[key] = value + + def __delitem__(self, key): + del self.hash[key] + + def keys(self): + return self.hash.keys() + + +class SqlaContainer(Container): + namespace_manager = SqlaNamespaceManager + +def make_cache_table(metadata, table_name='beaker_cache'): + """Return a ``Table`` object suitable for storing cached values for the + namespace manager. 
Do not create the table.""" + return sa.Table(table_name, metadata, + sa.Column('namespace', sa.String(255), primary_key=True), + sa.Column('accessed', sa.DateTime, nullable=False), + sa.Column('created', sa.DateTime, nullable=False), + sa.Column('data', sa.PickleType, nullable=False)) diff --git a/pyload/lib/beaker/middleware.py b/pyload/lib/beaker/middleware.py new file mode 100644 index 000000000..7ba88b37d --- /dev/null +++ b/pyload/lib/beaker/middleware.py @@ -0,0 +1,165 @@ +import warnings + +try: + from paste.registry import StackedObjectProxy + beaker_session = StackedObjectProxy(name="Beaker Session") + beaker_cache = StackedObjectProxy(name="Cache Manager") +except: + beaker_cache = None + beaker_session = None + +from beaker.cache import CacheManager +from beaker.session import Session, SessionObject +from beaker.util import coerce_cache_params, coerce_session_params, \ + parse_cache_config_options + + +class CacheMiddleware(object): + cache = beaker_cache + + def __init__(self, app, config=None, environ_key='beaker.cache', **kwargs): + """Initialize the Cache Middleware + + The Cache middleware will make a Cache instance available + every request under the ``environ['beaker.cache']`` key by + default. The location in environ can be changed by setting + ``environ_key``. + + ``config`` + dict All settings should be prefixed by 'cache.'. This + method of passing variables is intended for Paste and other + setups that accumulate multiple component settings in a + single dictionary. If config contains *no cache. prefixed + args*, then *all* of the config options will be used to + intialize the Cache objects. + + ``environ_key`` + Location where the Cache instance will keyed in the WSGI + environ + + ``**kwargs`` + All keyword arguments are assumed to be cache settings and + will override any settings found in ``config`` + + """ + self.app = app + config = config or {} + + self.options = {} + + # Update the options with the parsed config + self.options.update(parse_cache_config_options(config)) + + # Add any options from kwargs, but leave out the defaults this + # time + self.options.update( + parse_cache_config_options(kwargs, include_defaults=False)) + + # Assume all keys are intended for cache if none are prefixed with + # 'cache.' + if not self.options and config: + self.options = config + + self.options.update(kwargs) + self.cache_manager = CacheManager(**self.options) + self.environ_key = environ_key + + def __call__(self, environ, start_response): + if environ.get('paste.registry'): + if environ['paste.registry'].reglist: + environ['paste.registry'].register(self.cache, + self.cache_manager) + environ[self.environ_key] = self.cache_manager + return self.app(environ, start_response) + + +class SessionMiddleware(object): + session = beaker_session + + def __init__(self, wrap_app, config=None, environ_key='beaker.session', + **kwargs): + """Initialize the Session Middleware + + The Session middleware will make a lazy session instance + available every request under the ``environ['beaker.session']`` + key by default. The location in environ can be changed by + setting ``environ_key``. + + ``config`` + dict All settings should be prefixed by 'session.'. This + method of passing variables is intended for Paste and other + setups that accumulate multiple component settings in a + single dictionary. If config contains *no cache. prefixed + args*, then *all* of the config options will be used to + intialize the Cache objects. 
+ + ``environ_key`` + Location where the Session instance will keyed in the WSGI + environ + + ``**kwargs`` + All keyword arguments are assumed to be session settings and + will override any settings found in ``config`` + + """ + config = config or {} + + # Load up the default params + self.options = dict(invalidate_corrupt=True, type=None, + data_dir=None, key='beaker.session.id', + timeout=None, secret=None, log_file=None) + + # Pull out any config args meant for beaker session. if there are any + for dct in [config, kwargs]: + for key, val in dct.iteritems(): + if key.startswith('beaker.session.'): + self.options[key[15:]] = val + if key.startswith('session.'): + self.options[key[8:]] = val + if key.startswith('session_'): + warnings.warn('Session options should start with session. ' + 'instead of session_.', DeprecationWarning, 2) + self.options[key[8:]] = val + + # Coerce and validate session params + coerce_session_params(self.options) + + # Assume all keys are intended for cache if none are prefixed with + # 'cache.' + if not self.options and config: + self.options = config + + self.options.update(kwargs) + self.wrap_app = wrap_app + self.environ_key = environ_key + + def __call__(self, environ, start_response): + session = SessionObject(environ, **self.options) + if environ.get('paste.registry'): + if environ['paste.registry'].reglist: + environ['paste.registry'].register(self.session, session) + environ[self.environ_key] = session + environ['beaker.get_session'] = self._get_session + + def session_start_response(status, headers, exc_info = None): + if session.accessed(): + session.persist() + if session.__dict__['_headers']['set_cookie']: + cookie = session.__dict__['_headers']['cookie_out'] + if cookie: + headers.append(('Set-cookie', cookie)) + return start_response(status, headers, exc_info) + return self.wrap_app(environ, session_start_response) + + def _get_session(self): + return Session({}, use_cookies=False, **self.options) + + +def session_filter_factory(global_conf, **kwargs): + def filter(app): + return SessionMiddleware(app, global_conf, **kwargs) + return filter + + +def session_filter_app_factory(app, global_conf, **kwargs): + return SessionMiddleware(app, global_conf, **kwargs) diff --git a/pyload/lib/beaker/session.py b/pyload/lib/beaker/session.py new file mode 100644 index 000000000..7d465530b --- /dev/null +++ b/pyload/lib/beaker/session.py @@ -0,0 +1,618 @@ +import Cookie +import os +import random +import time +from datetime import datetime, timedelta + +from beaker.crypto import hmac as HMAC, hmac_sha1 as SHA1, md5 +from beaker.util import pickle + +from beaker import crypto +from beaker.cache import clsmap +from beaker.exceptions import BeakerException, InvalidCryptoBackendError +from base64 import b64encode, b64decode + + +__all__ = ['SignedCookie', 'Session'] + +getpid = hasattr(os, 'getpid') and os.getpid or (lambda : '') + +class SignedCookie(Cookie.BaseCookie): + """Extends python cookie to give digital signature support""" + def __init__(self, secret, input=None): + self.secret = secret + Cookie.BaseCookie.__init__(self, input) + + def value_decode(self, val): + val = val.strip('"') + sig = HMAC.new(self.secret, val[40:], SHA1).hexdigest() + + # Avoid timing attacks + invalid_bits = 0 + input_sig = val[:40] + if len(sig) != len(input_sig): + return None, val + + for a, b in zip(sig, input_sig): + invalid_bits += a != b + + if invalid_bits: + return None, val + else: + return val[40:], val + + def value_encode(self, val): + sig = HMAC.new(self.secret, 
val, SHA1).hexdigest() + return str(val), ("%s%s" % (sig, val)) + + +class Session(dict): + """Session object that uses container package for storage. + + ``key`` + The name the cookie should be set to. + ``timeout`` + How long session data is considered valid. This is used + regardless of the cookie being present or not to determine + whether session data is still valid. + ``cookie_domain`` + Domain to use for the cookie. + ``secure`` + Whether or not the cookie should only be sent over SSL. + """ + def __init__(self, request, id=None, invalidate_corrupt=False, + use_cookies=True, type=None, data_dir=None, + key='beaker.session.id', timeout=None, cookie_expires=True, + cookie_domain=None, secret=None, secure=False, + namespace_class=None, **namespace_args): + if not type: + if data_dir: + self.type = 'file' + else: + self.type = 'memory' + else: + self.type = type + + self.namespace_class = namespace_class or clsmap[self.type] + + self.namespace_args = namespace_args + + self.request = request + self.data_dir = data_dir + self.key = key + + self.timeout = timeout + self.use_cookies = use_cookies + self.cookie_expires = cookie_expires + + # Default cookie domain/path + self._domain = cookie_domain + self._path = '/' + self.was_invalidated = False + self.secret = secret + self.secure = secure + self.id = id + self.accessed_dict = {} + + if self.use_cookies: + cookieheader = request.get('cookie', '') + if secret: + try: + self.cookie = SignedCookie(secret, input=cookieheader) + except Cookie.CookieError: + self.cookie = SignedCookie(secret, input=None) + else: + self.cookie = Cookie.SimpleCookie(input=cookieheader) + + if not self.id and self.key in self.cookie: + self.id = self.cookie[self.key].value + + self.is_new = self.id is None + if self.is_new: + self._create_id() + self['_accessed_time'] = self['_creation_time'] = time.time() + else: + try: + self.load() + except: + if invalidate_corrupt: + self.invalidate() + else: + raise + + def _create_id(self): + self.id = md5( + md5("%f%s%f%s" % (time.time(), id({}), random.random(), + getpid())).hexdigest(), + ).hexdigest() + self.is_new = True + self.last_accessed = None + if self.use_cookies: + self.cookie[self.key] = self.id + if self._domain: + self.cookie[self.key]['domain'] = self._domain + if self.secure: + self.cookie[self.key]['secure'] = True + self.cookie[self.key]['path'] = self._path + if self.cookie_expires is not True: + if self.cookie_expires is False: + expires = datetime.fromtimestamp( 0x7FFFFFFF ) + elif isinstance(self.cookie_expires, timedelta): + expires = datetime.today() + self.cookie_expires + elif isinstance(self.cookie_expires, datetime): + expires = self.cookie_expires + else: + raise ValueError("Invalid argument for cookie_expires: %s" + % repr(self.cookie_expires)) + self.cookie[self.key]['expires'] = \ + expires.strftime("%a, %d-%b-%Y %H:%M:%S GMT" ) + self.request['cookie_out'] = self.cookie[self.key].output(header='') + self.request['set_cookie'] = False + + def created(self): + return self['_creation_time'] + created = property(created) + + def _set_domain(self, domain): + self['_domain'] = domain + self.cookie[self.key]['domain'] = domain + self.request['cookie_out'] = self.cookie[self.key].output(header='') + self.request['set_cookie'] = True + + def _get_domain(self): + return self._domain + + domain = property(_get_domain, _set_domain) + + def _set_path(self, path): + self['_path'] = path + self.cookie[self.key]['path'] = path + self.request['cookie_out'] = self.cookie[self.key].output(header='') + 
self.request['set_cookie'] = True + + def _get_path(self): + return self._path + + path = property(_get_path, _set_path) + + def _delete_cookie(self): + self.request['set_cookie'] = True + self.cookie[self.key] = self.id + if self._domain: + self.cookie[self.key]['domain'] = self._domain + if self.secure: + self.cookie[self.key]['secure'] = True + self.cookie[self.key]['path'] = '/' + expires = datetime.today().replace(year=2003) + self.cookie[self.key]['expires'] = \ + expires.strftime("%a, %d-%b-%Y %H:%M:%S GMT" ) + self.request['cookie_out'] = self.cookie[self.key].output(header='') + self.request['set_cookie'] = True + + def delete(self): + """Deletes the session from the persistent storage, and sends + an expired cookie out""" + if self.use_cookies: + self._delete_cookie() + self.clear() + + def invalidate(self): + """Invalidates this session, creates a new session id, returns + to the is_new state""" + self.clear() + self.was_invalidated = True + self._create_id() + self.load() + + def load(self): + "Loads the data from this session from persistent storage" + self.namespace = self.namespace_class(self.id, + data_dir=self.data_dir, digest_filenames=False, + **self.namespace_args) + now = time.time() + self.request['set_cookie'] = True + + self.namespace.acquire_read_lock() + timed_out = False + try: + self.clear() + try: + session_data = self.namespace['session'] + + # Memcached always returns a key, its None when its not + # present + if session_data is None: + session_data = { + '_creation_time':now, + '_accessed_time':now + } + self.is_new = True + except (KeyError, TypeError): + session_data = { + '_creation_time':now, + '_accessed_time':now + } + self.is_new = True + + if self.timeout is not None and \ + now - session_data['_accessed_time'] > self.timeout: + timed_out= True + else: + # Properly set the last_accessed time, which is different + # than the *currently* _accessed_time + if self.is_new or '_accessed_time' not in session_data: + self.last_accessed = None + else: + self.last_accessed = session_data['_accessed_time'] + + # Update the current _accessed_time + session_data['_accessed_time'] = now + self.update(session_data) + self.accessed_dict = session_data.copy() + finally: + self.namespace.release_read_lock() + if timed_out: + self.invalidate() + + def save(self, accessed_only=False): + """Saves the data for this session to persistent storage + + If accessed_only is True, then only the original data loaded + at the beginning of the request will be saved, with the updated + last accessed time. + + """ + # Look to see if its a new session that was only accessed + # Don't save it under that case + if accessed_only and self.is_new: + return None + + if not hasattr(self, 'namespace'): + self.namespace = self.namespace_class( + self.id, + data_dir=self.data_dir, + digest_filenames=False, + **self.namespace_args) + + self.namespace.acquire_write_lock() + try: + if accessed_only: + data = dict(self.accessed_dict.items()) + else: + data = dict(self.items()) + + # Save the data + if not data and 'session' in self.namespace: + del self.namespace['session'] + else: + self.namespace['session'] = data + finally: + self.namespace.release_write_lock() + if self.is_new: + self.request['set_cookie'] = True + + def revert(self): + """Revert the session to its original state from its first + access in the request""" + self.clear() + self.update(self.accessed_dict) + + # TODO: I think both these methods should be removed. 
They're from + # the original mod_python code i was ripping off but they really + # have no use here. + def lock(self): + """Locks this session against other processes/threads. This is + automatic when load/save is called. + + ***use with caution*** and always with a corresponding 'unlock' + inside a "finally:" block, as a stray lock typically cannot be + unlocked without shutting down the whole application. + + """ + self.namespace.acquire_write_lock() + + def unlock(self): + """Unlocks this session against other processes/threads. This + is automatic when load/save is called. + + ***use with caution*** and always within a "finally:" block, as + a stray lock typically cannot be unlocked without shutting down + the whole application. + + """ + self.namespace.release_write_lock() + +class CookieSession(Session): + """Pure cookie-based session + + Options recognized when using cookie-based sessions are slightly + more restricted than general sessions. + + ``key`` + The name the cookie should be set to. + ``timeout`` + How long session data is considered valid. This is used + regardless of the cookie being present or not to determine + whether session data is still valid. + ``encrypt_key`` + The key to use for the session encryption, if not provided the + session will not be encrypted. + ``validate_key`` + The key used to sign the encrypted session + ``cookie_domain`` + Domain to use for the cookie. + ``secure`` + Whether or not the cookie should only be sent over SSL. + + """ + def __init__(self, request, key='beaker.session.id', timeout=None, + cookie_expires=True, cookie_domain=None, encrypt_key=None, + validate_key=None, secure=False, **kwargs): + + if not crypto.has_aes and encrypt_key: + raise InvalidCryptoBackendError("No AES library is installed, can't generate " + "encrypted cookie-only Session.") + + self.request = request + self.key = key + self.timeout = timeout + self.cookie_expires = cookie_expires + self.encrypt_key = encrypt_key + self.validate_key = validate_key + self.request['set_cookie'] = False + self.secure = secure + self._domain = cookie_domain + self._path = '/' + + try: + cookieheader = request['cookie'] + except KeyError: + cookieheader = '' + + if validate_key is None: + raise BeakerException("No validate_key specified for Cookie only " + "Session.") + + try: + self.cookie = SignedCookie(validate_key, input=cookieheader) + except Cookie.CookieError: + self.cookie = SignedCookie(validate_key, input=None) + + self['_id'] = self._make_id() + self.is_new = True + + # If we have a cookie, load it + if self.key in self.cookie and self.cookie[self.key].value is not None: + self.is_new = False + try: + self.update(self._decrypt_data()) + except: + pass + if self.timeout is not None and time.time() - \ + self['_accessed_time'] > self.timeout: + self.clear() + self.accessed_dict = self.copy() + self._create_cookie() + + def created(self): + return self['_creation_time'] + created = property(created) + + def id(self): + return self['_id'] + id = property(id) + + def _set_domain(self, domain): + self['_domain'] = domain + self._domain = domain + + def _get_domain(self): + return self._domain + + domain = property(_get_domain, _set_domain) + + def _set_path(self, path): + self['_path'] = path + self._path = path + + def _get_path(self): + return self._path + + path = property(_get_path, _set_path) + + def _encrypt_data(self): + """Serialize, encipher, and base64 the session dict""" + if self.encrypt_key: + nonce = b64encode(os.urandom(40))[:8] + encrypt_key = 
crypto.generateCryptoKeys(self.encrypt_key, + self.validate_key + nonce, 1) + data = pickle.dumps(self.copy(), 2) + return nonce + b64encode(crypto.aesEncrypt(data, encrypt_key)) + else: + data = pickle.dumps(self.copy(), 2) + return b64encode(data) + + def _decrypt_data(self): + """Bas64, decipher, then un-serialize the data for the session + dict""" + if self.encrypt_key: + nonce = self.cookie[self.key].value[:8] + encrypt_key = crypto.generateCryptoKeys(self.encrypt_key, + self.validate_key + nonce, 1) + payload = b64decode(self.cookie[self.key].value[8:]) + data = crypto.aesDecrypt(payload, encrypt_key) + return pickle.loads(data) + else: + data = b64decode(self.cookie[self.key].value) + return pickle.loads(data) + + def _make_id(self): + return md5(md5( + "%f%s%f%s" % (time.time(), id({}), random.random(), getpid()) + ).hexdigest() + ).hexdigest() + + def save(self, accessed_only=False): + """Saves the data for this session to persistent storage""" + if accessed_only and self.is_new: + return + if accessed_only: + self.clear() + self.update(self.accessed_dict) + self._create_cookie() + + def expire(self): + """Delete the 'expires' attribute on this Session, if any.""" + + self.pop('_expires', None) + + def _create_cookie(self): + if '_creation_time' not in self: + self['_creation_time'] = time.time() + if '_id' not in self: + self['_id'] = self._make_id() + self['_accessed_time'] = time.time() + + if self.cookie_expires is not True: + if self.cookie_expires is False: + expires = datetime.fromtimestamp( 0x7FFFFFFF ) + elif isinstance(self.cookie_expires, timedelta): + expires = datetime.today() + self.cookie_expires + elif isinstance(self.cookie_expires, datetime): + expires = self.cookie_expires + else: + raise ValueError("Invalid argument for cookie_expires: %s" + % repr(self.cookie_expires)) + self['_expires'] = expires + elif '_expires' in self: + expires = self['_expires'] + else: + expires = None + + val = self._encrypt_data() + if len(val) > 4064: + raise BeakerException("Cookie value is too long to store") + + self.cookie[self.key] = val + if '_domain' in self: + self.cookie[self.key]['domain'] = self['_domain'] + elif self._domain: + self.cookie[self.key]['domain'] = self._domain + if self.secure: + self.cookie[self.key]['secure'] = True + + self.cookie[self.key]['path'] = self.get('_path', '/') + + if expires: + self.cookie[self.key]['expires'] = \ + expires.strftime("%a, %d-%b-%Y %H:%M:%S GMT" ) + self.request['cookie_out'] = self.cookie[self.key].output(header='') + self.request['set_cookie'] = True + + def delete(self): + """Delete the cookie, and clear the session""" + # Send a delete cookie request + self._delete_cookie() + self.clear() + + def invalidate(self): + """Clear the contents and start a new session""" + self.delete() + self['_id'] = self._make_id() + + +class SessionObject(object): + """Session proxy/lazy creator + + This object proxies access to the actual session object, so that in + the case that the session hasn't been used before, it will be + setup. This avoid creating and loading the session from persistent + storage unless its actually used during the request. 
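Session identifiers are minted above (_make_id() here, _create_id() earlier) as a double MD5 over a few entropy sources; a self-contained equivalent:

import os, random, time
from hashlib import md5

def make_id():
    # same recipe as _make_id() above: hash time, an object id, a random
    # float and the pid, then hash the hex digest once more
    raw = '%f%s%f%s' % (time.time(), id({}), random.random(), os.getpid())
    return md5(md5(raw.encode()).hexdigest().encode()).hexdigest()

assert len(make_id()) == 32

Note that MD5 here only produces an opaque 32-character identifier; the cookie's integrity comes from the SignedCookie HMAC, not from this hash.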
+ + """ + def __init__(self, environ, **params): + self.__dict__['_params'] = params + self.__dict__['_environ'] = environ + self.__dict__['_sess'] = None + self.__dict__['_headers'] = [] + + def _session(self): + """Lazy initial creation of session object""" + if self.__dict__['_sess'] is None: + params = self.__dict__['_params'] + environ = self.__dict__['_environ'] + self.__dict__['_headers'] = req = {'cookie_out':None} + req['cookie'] = environ.get('HTTP_COOKIE') + if params.get('type') == 'cookie': + self.__dict__['_sess'] = CookieSession(req, **params) + else: + self.__dict__['_sess'] = Session(req, use_cookies=True, + **params) + return self.__dict__['_sess'] + + def __getattr__(self, attr): + return getattr(self._session(), attr) + + def __setattr__(self, attr, value): + setattr(self._session(), attr, value) + + def __delattr__(self, name): + self._session().__delattr__(name) + + def __getitem__(self, key): + return self._session()[key] + + def __setitem__(self, key, value): + self._session()[key] = value + + def __delitem__(self, key): + self._session().__delitem__(key) + + def __repr__(self): + return self._session().__repr__() + + def __iter__(self): + """Only works for proxying to a dict""" + return iter(self._session().keys()) + + def __contains__(self, key): + return self._session().has_key(key) + + def get_by_id(self, id): + """Loads a session given a session ID""" + params = self.__dict__['_params'] + session = Session({}, use_cookies=False, id=id, **params) + if session.is_new: + return None + return session + + def save(self): + self.__dict__['_dirty'] = True + + def delete(self): + self.__dict__['_dirty'] = True + self._session().delete() + + def persist(self): + """Persist the session to the storage + + If its set to autosave, then the entire session will be saved + regardless of if save() has been called. Otherwise, just the + accessed time will be updated if save() was not called, or + the session will be saved if save() was called. + + """ + if self.__dict__['_params'].get('auto'): + self._session().save() + else: + if self.__dict__.get('_dirty'): + self._session().save() + else: + self._session().save(accessed_only=True) + + def dirty(self): + return self.__dict__.get('_dirty', False) + + def accessed(self): + """Returns whether or not the session has been accessed""" + return self.__dict__['_sess'] is not None diff --git a/pyload/lib/beaker/synchronization.py b/pyload/lib/beaker/synchronization.py new file mode 100644 index 000000000..761303707 --- /dev/null +++ b/pyload/lib/beaker/synchronization.py @@ -0,0 +1,381 @@ +"""Synchronization functions. + +File- and mutex-based mutual exclusion synchronizers are provided, +as well as a name-based mutex which locks within an application +based on a string name. + +""" + +import os +import sys +import tempfile + +try: + import threading as _threading +except ImportError: + import dummy_threading as _threading + +# check for fcntl module +try: + sys.getwindowsversion() + has_flock = False +except: + try: + import fcntl + has_flock = True + except ImportError: + has_flock = False + +from beaker import util +from beaker.exceptions import LockError + +__all__ = ["file_synchronizer", "mutex_synchronizer", "null_synchronizer", + "NameLock", "_threading"] + + +class NameLock(object): + """a proxy for an RLock object that is stored in a name based + registry. + + Multiple threads can get a reference to the same RLock based on the + name alone, and synchronize operations related to that name. 
+ + """ + locks = util.WeakValuedRegistry() + + class NLContainer(object): + def __init__(self, reentrant): + if reentrant: + self.lock = _threading.RLock() + else: + self.lock = _threading.Lock() + def __call__(self): + return self.lock + + def __init__(self, identifier = None, reentrant = False): + if identifier is None: + self._lock = NameLock.NLContainer(reentrant) + else: + self._lock = NameLock.locks.get(identifier, NameLock.NLContainer, + reentrant) + + def acquire(self, wait = True): + return self._lock().acquire(wait) + + def release(self): + self._lock().release() + + +_synchronizers = util.WeakValuedRegistry() +def _synchronizer(identifier, cls, **kwargs): + return _synchronizers.sync_get((identifier, cls), cls, identifier, **kwargs) + + +def file_synchronizer(identifier, **kwargs): + if not has_flock or 'lock_dir' not in kwargs: + return mutex_synchronizer(identifier) + else: + return _synchronizer(identifier, FileSynchronizer, **kwargs) + + +def mutex_synchronizer(identifier, **kwargs): + return _synchronizer(identifier, ConditionSynchronizer, **kwargs) + + +class null_synchronizer(object): + def acquire_write_lock(self, wait=True): + return True + def acquire_read_lock(self): + pass + def release_write_lock(self): + pass + def release_read_lock(self): + pass + acquire = acquire_write_lock + release = release_write_lock + + +class SynchronizerImpl(object): + def __init__(self): + self._state = util.ThreadLocal() + + class SyncState(object): + __slots__ = 'reentrantcount', 'writing', 'reading' + + def __init__(self): + self.reentrantcount = 0 + self.writing = False + self.reading = False + + def state(self): + if not self._state.has(): + state = SynchronizerImpl.SyncState() + self._state.put(state) + return state + else: + return self._state.get() + state = property(state) + + def release_read_lock(self): + state = self.state + + if state.writing: + raise LockError("lock is in writing state") + if not state.reading: + raise LockError("lock is not in reading state") + + if state.reentrantcount == 1: + self.do_release_read_lock() + state.reading = False + + state.reentrantcount -= 1 + + def acquire_read_lock(self, wait = True): + state = self.state + + if state.writing: + raise LockError("lock is in writing state") + + if state.reentrantcount == 0: + x = self.do_acquire_read_lock(wait) + if (wait or x): + state.reentrantcount += 1 + state.reading = True + return x + elif state.reading: + state.reentrantcount += 1 + return True + + def release_write_lock(self): + state = self.state + + if state.reading: + raise LockError("lock is in reading state") + if not state.writing: + raise LockError("lock is not in writing state") + + if state.reentrantcount == 1: + self.do_release_write_lock() + state.writing = False + + state.reentrantcount -= 1 + + release = release_write_lock + + def acquire_write_lock(self, wait = True): + state = self.state + + if state.reading: + raise LockError("lock is in reading state") + + if state.reentrantcount == 0: + x = self.do_acquire_write_lock(wait) + if (wait or x): + state.reentrantcount += 1 + state.writing = True + return x + elif state.writing: + state.reentrantcount += 1 + return True + + acquire = acquire_write_lock + + def do_release_read_lock(self): + raise NotImplementedError() + + def do_acquire_read_lock(self): + raise NotImplementedError() + + def do_release_write_lock(self): + raise NotImplementedError() + + def do_acquire_write_lock(self): + raise NotImplementedError() + + +class FileSynchronizer(SynchronizerImpl): + """a synchronizer which 
locks using flock(). + + Adapted for Python/multithreads from Apache::Session::Lock::File, + http://search.cpan.org/src/CWEST/Apache-Session-1.81/Session/Lock/File.pm + + This module does not unlink temporary files, + because it interferes with proper locking. This can cause + problems on certain systems (Linux) whose file systems (ext2) do not + perform well with lots of files in one directory. To prevent this + you should use a script to clean out old files from your lock directory. + + """ + def __init__(self, identifier, lock_dir): + super(FileSynchronizer, self).__init__() + self._filedescriptor = util.ThreadLocal() + + if lock_dir is None: + lock_dir = tempfile.gettempdir() + else: + lock_dir = lock_dir + + self.filename = util.encoded_path( + lock_dir, + [identifier], + extension='.lock' + ) + + def _filedesc(self): + return self._filedescriptor.get() + _filedesc = property(_filedesc) + + def _open(self, mode): + filedescriptor = self._filedesc + if filedescriptor is None: + filedescriptor = os.open(self.filename, mode) + self._filedescriptor.put(filedescriptor) + return filedescriptor + + def do_acquire_read_lock(self, wait): + filedescriptor = self._open(os.O_CREAT | os.O_RDONLY) + if not wait: + try: + fcntl.flock(filedescriptor, fcntl.LOCK_SH | fcntl.LOCK_NB) + return True + except IOError: + os.close(filedescriptor) + self._filedescriptor.remove() + return False + else: + fcntl.flock(filedescriptor, fcntl.LOCK_SH) + return True + + def do_acquire_write_lock(self, wait): + filedescriptor = self._open(os.O_CREAT | os.O_WRONLY) + if not wait: + try: + fcntl.flock(filedescriptor, fcntl.LOCK_EX | fcntl.LOCK_NB) + return True + except IOError: + os.close(filedescriptor) + self._filedescriptor.remove() + return False + else: + fcntl.flock(filedescriptor, fcntl.LOCK_EX) + return True + + def do_release_read_lock(self): + self._release_all_locks() + + def do_release_write_lock(self): + self._release_all_locks() + + def _release_all_locks(self): + filedescriptor = self._filedesc + if filedescriptor is not None: + fcntl.flock(filedescriptor, fcntl.LOCK_UN) + os.close(filedescriptor) + self._filedescriptor.remove() + + +class ConditionSynchronizer(SynchronizerImpl): + """a synchronizer using a Condition.""" + + def __init__(self, identifier): + super(ConditionSynchronizer, self).__init__() + + # counts how many asynchronous methods are executing + self.async = 0 + + # pointer to thread that is the current sync operation + self.current_sync_operation = None + + # condition object to lock on + self.condition = _threading.Condition(_threading.Lock()) + + def do_acquire_read_lock(self, wait = True): + self.condition.acquire() + try: + # see if a synchronous operation is waiting to start + # or is already running, in which case we wait (or just + # give up and return) + if wait: + while self.current_sync_operation is not None: + self.condition.wait() + else: + if self.current_sync_operation is not None: + return False + + self.async += 1 + finally: + self.condition.release() + + if not wait: + return True + + def do_release_read_lock(self): + self.condition.acquire() + try: + self.async -= 1 + + # check if we are the last asynchronous reader thread + # out the door. + if self.async == 0: + # yes. 
so if a sync operation is waiting, notifyAll to wake + # it up + if self.current_sync_operation is not None: + self.condition.notifyAll() + elif self.async < 0: + raise LockError("Synchronizer error - too many " + "release_read_locks called") + finally: + self.condition.release() + + def do_acquire_write_lock(self, wait = True): + self.condition.acquire() + try: + # here, we are not a synchronous reader, and after returning, + # assuming waiting or immediate availability, we will be. + + if wait: + # if another sync is working, wait + while self.current_sync_operation is not None: + self.condition.wait() + else: + # if another sync is working, + # we dont want to wait, so forget it + if self.current_sync_operation is not None: + return False + + # establish ourselves as the current sync + # this indicates to other read/write operations + # that they should wait until this is None again + self.current_sync_operation = _threading.currentThread() + + # now wait again for asyncs to finish + if self.async > 0: + if wait: + # wait + self.condition.wait() + else: + # we dont want to wait, so forget it + self.current_sync_operation = None + return False + finally: + self.condition.release() + + if not wait: + return True + + def do_release_write_lock(self): + self.condition.acquire() + try: + if self.current_sync_operation is not _threading.currentThread(): + raise LockError("Synchronizer error - current thread doesnt " + "have the write lock") + + # reset the current sync operation so + # another can get it + self.current_sync_operation = None + + # tell everyone to get ready + self.condition.notifyAll() + finally: + # everyone go !! + self.condition.release() diff --git a/pyload/lib/beaker/util.py b/pyload/lib/beaker/util.py new file mode 100644 index 000000000..04c9617c5 --- /dev/null +++ b/pyload/lib/beaker/util.py @@ -0,0 +1,302 @@ +"""Beaker utilities""" + +try: + import thread as _thread + import threading as _threading +except ImportError: + import dummy_thread as _thread + import dummy_threading as _threading + +from datetime import datetime, timedelta +import os +import string +import types +import weakref +import warnings +import sys + +py3k = getattr(sys, 'py3kwarning', False) or sys.version_info >= (3, 0) +py24 = sys.version_info < (2,5) +jython = sys.platform.startswith('java') + +if py3k or jython: + import pickle +else: + import cPickle as pickle + +from beaker.converters import asbool +from threading import local as _tlocal + + +__all__ = ["ThreadLocal", "Registry", "WeakValuedRegistry", "SyncDict", + "encoded_path", "verify_directory"] + + +def verify_directory(dir): + """verifies and creates a directory. tries to + ignore collisions with other threads and processes.""" + + tries = 0 + while not os.access(dir, os.F_OK): + try: + tries += 1 + os.makedirs(dir) + except: + if tries > 5: + raise + + +def deprecated(message): + def wrapper(fn): + def deprecated_method(*args, **kargs): + warnings.warn(message, DeprecationWarning, 2) + return fn(*args, **kargs) + # TODO: use decorator ? functools.wrapper ? 
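The TODO just above asks whether a decorator helper should be used; functools.wraps (available since Python 2.5, hence the manual attribute copying kept here for 2.4) collapses the two assignments into one line. A sketch, noting it copies fn.__doc__ verbatim rather than prepending the message:

import functools
import warnings

def deprecated(message):
    def wrapper(fn):
        @functools.wraps(fn)  # copies __name__, __doc__, __module__, ...
        def deprecated_method(*args, **kwargs):
            warnings.warn(message, DeprecationWarning, 2)
            return fn(*args, **kwargs)
        return deprecated_method
    return wrapper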
+ deprecated_method.__name__ = fn.__name__ + deprecated_method.__doc__ = "%s\n\n%s" % (message, fn.__doc__) + return deprecated_method + return wrapper + +class ThreadLocal(object): + """stores a value on a per-thread basis""" + + __slots__ = '_tlocal' + + def __init__(self): + self._tlocal = _tlocal() + + def put(self, value): + self._tlocal.value = value + + def has(self): + return hasattr(self._tlocal, 'value') + + def get(self, default=None): + return getattr(self._tlocal, 'value', default) + + def remove(self): + del self._tlocal.value + +class SyncDict(object): + """ + An efficient/threadsafe singleton map algorithm, a.k.a. + "get a value based on this key, and create if not found or not + valid" paradigm: + + exists && isvalid ? get : create + + Designed to work with weakref dictionaries to expect items + to asynchronously disappear from the dictionary. + + Use python 2.3.3 or greater ! a major bug was just fixed in Nov. + 2003 that was driving me nuts with garbage collection/weakrefs in + this section. + + """ + def __init__(self): + self.mutex = _thread.allocate_lock() + self.dict = {} + + def get(self, key, createfunc, *args, **kwargs): + try: + if self.has_key(key): + return self.dict[key] + else: + return self.sync_get(key, createfunc, *args, **kwargs) + except KeyError: + return self.sync_get(key, createfunc, *args, **kwargs) + + def sync_get(self, key, createfunc, *args, **kwargs): + self.mutex.acquire() + try: + try: + if self.has_key(key): + return self.dict[key] + else: + return self._create(key, createfunc, *args, **kwargs) + except KeyError: + return self._create(key, createfunc, *args, **kwargs) + finally: + self.mutex.release() + + def _create(self, key, createfunc, *args, **kwargs): + self[key] = obj = createfunc(*args, **kwargs) + return obj + + def has_key(self, key): + return self.dict.has_key(key) + + def __contains__(self, key): + return self.dict.__contains__(key) + def __getitem__(self, key): + return self.dict.__getitem__(key) + def __setitem__(self, key, value): + self.dict.__setitem__(key, value) + def __delitem__(self, key): + return self.dict.__delitem__(key) + def clear(self): + self.dict.clear() + + +class WeakValuedRegistry(SyncDict): + def __init__(self): + self.mutex = _threading.RLock() + self.dict = weakref.WeakValueDictionary() + +sha1 = None +def encoded_path(root, identifiers, extension = ".enc", depth = 3, + digest_filenames=True): + + """Generate a unique file-accessible path from the given list of + identifiers starting at the given root directory.""" + ident = "_".join(identifiers) + + global sha1 + if sha1 is None: + from beaker.crypto import sha1 + + if digest_filenames: + if py3k: + ident = sha1(ident.encode('utf-8')).hexdigest() + else: + ident = sha1(ident).hexdigest() + + ident = os.path.basename(ident) + + tokens = [] + for d in range(1, depth): + tokens.append(ident[0:d]) + + dir = os.path.join(root, *tokens) + verify_directory(dir) + + return os.path.join(dir, ident + extension) + + +def verify_options(opt, types, error): + if not isinstance(opt, types): + if not isinstance(types, tuple): + types = (types,) + coerced = False + for typ in types: + try: + if typ in (list, tuple): + opt = [x.strip() for x in opt.split(',')] + else: + if typ == bool: + typ = asbool + opt = typ(opt) + coerced = True + except: + pass + if coerced: + break + if not coerced: + raise Exception(error) + elif isinstance(opt, str) and not opt.strip(): + raise Exception("Empty strings are invalid for: %s" % error) + return opt + + +def verify_rules(params, 
ruleset): + for key, types, message in ruleset: + if key in params: + params[key] = verify_options(params[key], types, message) + return params + + +def coerce_session_params(params): + rules = [ + ('data_dir', (str, types.NoneType), "data_dir must be a string " + "referring to a directory."), + ('lock_dir', (str, types.NoneType), "lock_dir must be a string referring to a " + "directory."), + ('type', (str, types.NoneType), "Session type must be a string."), + ('cookie_expires', (bool, datetime, timedelta), "Cookie expires was " + "not a boolean, datetime, or timedelta instance."), + ('cookie_domain', (str, types.NoneType), "Cookie domain must be a " + "string."), + ('id', (str,), "Session id must be a string."), + ('key', (str,), "Session key must be a string."), + ('secret', (str, types.NoneType), "Session secret must be a string."), + ('validate_key', (str, types.NoneType), "Session encrypt_key must be " + "a string."), + ('encrypt_key', (str, types.NoneType), "Session validate_key must be " + "a string."), + ('secure', (bool, types.NoneType), "Session secure must be a boolean."), + ('timeout', (int, types.NoneType), "Session timeout must be an " + "integer."), + ('auto', (bool, types.NoneType), "Session is created if accessed."), + ] + return verify_rules(params, rules) + + +def coerce_cache_params(params): + rules = [ + ('data_dir', (str, types.NoneType), "data_dir must be a string " + "referring to a directory."), + ('lock_dir', (str, types.NoneType), "lock_dir must be a string referring to a " + "directory."), + ('type', (str,), "Cache type must be a string."), + ('enabled', (bool, types.NoneType), "enabled must be true/false " + "if present."), + ('expire', (int, types.NoneType), "expire must be an integer representing " + "how many seconds the cache is valid for"), + ('regions', (list, tuple, types.NoneType), "Regions must be a " + "comma seperated list of valid regions") + ] + return verify_rules(params, rules) + + +def parse_cache_config_options(config, include_defaults=True): + """Parse configuration options and validate for use with the + CacheManager""" + + # Load default cache options + if include_defaults: + options= dict(type='memory', data_dir=None, expire=None, + log_file=None) + else: + options = {} + for key, val in config.iteritems(): + if key.startswith('beaker.cache.'): + options[key[13:]] = val + if key.startswith('cache.'): + options[key[6:]] = val + coerce_cache_params(options) + + # Set cache to enabled if not turned off + if 'enabled' not in options: + options['enabled'] = True + + # Configure region dict if regions are available + regions = options.pop('regions', None) + if regions: + region_configs = {} + for region in regions: + # Setup the default cache options + region_options = dict(data_dir=options.get('data_dir'), + lock_dir=options.get('lock_dir'), + type=options.get('type'), + enabled=options['enabled'], + expire=options.get('expire')) + region_len = len(region) + 1 + for key in options.keys(): + if key.startswith('%s.' 
% region): + region_options[key[region_len:]] = options.pop(key) + coerce_cache_params(region_options) + region_configs[region] = region_options + options['cache_regions'] = region_configs + return options + +def func_namespace(func): + """Generates a unique namespace for a function""" + kls = None + if hasattr(func, 'im_func'): + kls = func.im_class + func = func.im_func + + if kls: + return '%s.%s' % (kls.__module__, kls.__name__) + else: + return '%s.%s' % (func.__module__, func.__name__) diff --git a/pyload/lib/bottle.py b/pyload/lib/bottle.py new file mode 100644 index 000000000..b00bda1c9 --- /dev/null +++ b/pyload/lib/bottle.py @@ -0,0 +1,3251 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +""" +Bottle is a fast and simple micro-framework for small web applications. It +offers request dispatching (Routes) with url parameter support, templates, +a built-in HTTP Server and adapters for many third party WSGI/HTTP-server and +template engines - all in a single file and with no dependencies other than the +Python Standard Library. + +Homepage and documentation: http://bottlepy.org/ + +Copyright (c) 2012, Marcel Hellkamp. +License: MIT (see LICENSE for details) +""" + +from __future__ import with_statement + +__author__ = 'Marcel Hellkamp' +__version__ = '0.11.4' +__license__ = 'MIT' + +# The gevent server adapter needs to patch some modules before they are imported +# This is why we parse the commandline parameters here but handle them later +if __name__ == '__main__': + from optparse import OptionParser + _cmd_parser = OptionParser(usage="usage: %prog [options] package.module:app") + _opt = _cmd_parser.add_option + _opt("--version", action="store_true", help="show version number.") + _opt("-b", "--bind", metavar="ADDRESS", help="bind socket to ADDRESS.") + _opt("-s", "--server", default='wsgiref', help="use SERVER as backend.") + _opt("-p", "--plugin", action="append", help="install additional plugin/s.") + _opt("--debug", action="store_true", help="start server in debug mode.") + _opt("--reload", action="store_true", help="auto-reload on file changes.") + _cmd_options, _cmd_args = _cmd_parser.parse_args() + if _cmd_options.server and _cmd_options.server.startswith('gevent'): + import gevent.monkey; gevent.monkey.patch_all() + +import base64, cgi, email.utils, functools, hmac, imp, itertools, mimetypes,\ + os, re, subprocess, sys, tempfile, threading, time, urllib, warnings + +from datetime import date as datedate, datetime, timedelta +from tempfile import TemporaryFile +from traceback import format_exc, print_exc + +try: from json import dumps as json_dumps, loads as json_lds +except ImportError: # pragma: no cover + try: from simplejson import dumps as json_dumps, loads as json_lds + except ImportError: + try: from django.utils.simplejson import dumps as json_dumps, loads as json_lds + except ImportError: + def json_dumps(data): + raise ImportError("JSON support requires Python 2.6 or simplejson.") + json_lds = json_dumps + + + +# We now try to fix 2.5/2.6/3.1/3.2 incompatibilities. +# It ain't pretty but it works... Sorry for the mess. + +py = sys.version_info +py3k = py >= (3,0,0) +py25 = py < (2,6,0) +py31 = (3,1,0) <= py < (3,2,0) + +# Workaround for the missing "as" keyword in py3k. 
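The parse_cache_config_options() helper in beaker/util.py above consumes flat, Paste-style settings and coerces them per region; a hedged example (keys, paths, and values are illustrative):

from beaker.util import parse_cache_config_options

config = {
    'cache.type': 'file',
    'cache.data_dir': '/tmp/cache/data',
    'cache.lock_dir': '/tmp/cache/lock',
    'cache.regions': 'short, long',
    'cache.short.expire': '60',    # coerced to int by coerce_cache_params
    'cache.long.expire': '3600',
}
options = parse_cache_config_options(config)
assert options['cache_regions']['short']['expire'] == 60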
+def _e(): return sys.exc_info()[1] + +# Workaround for the "print is a keyword/function" Python 2/3 dilemma +# and a fallback for mod_wsgi (resticts stdout/err attribute access) +try: + _stdout, _stderr = sys.stdout.write, sys.stderr.write +except IOError: + _stdout = lambda x: sys.stdout.write(x) + _stderr = lambda x: sys.stderr.write(x) + +# Lots of stdlib and builtin differences. +if py3k: + import http.client as httplib + import _thread as thread + from urllib.parse import urljoin, SplitResult as UrlSplitResult + from urllib.parse import urlencode, quote as urlquote, unquote as urlunquote + urlunquote = functools.partial(urlunquote, encoding='latin1') + from http.cookies import SimpleCookie + from collections import MutableMapping as DictMixin + import pickle + from io import BytesIO + basestring = str + unicode = str + json_loads = lambda s: json_lds(touni(s)) + callable = lambda x: hasattr(x, '__call__') + imap = map +else: # 2.x + import httplib + import thread + from urlparse import urljoin, SplitResult as UrlSplitResult + from urllib import urlencode, quote as urlquote, unquote as urlunquote + from Cookie import SimpleCookie + from itertools import imap + import cPickle as pickle + from StringIO import StringIO as BytesIO + if py25: + from UserDict import DictMixin + def next(it): return it.next() + bytes = str + else: # 2.6, 2.7 + from collections import MutableMapping as DictMixin + json_loads = json_lds + +# Some helpers for string/byte handling +def tob(s, enc='utf8'): + return s.encode(enc) if isinstance(s, unicode) else bytes(s) +def touni(s, enc='utf8', err='strict'): + return s.decode(enc, err) if isinstance(s, bytes) else unicode(s) +tonat = touni if py3k else tob + +# 3.2 fixes cgi.FieldStorage to accept bytes (which makes a lot of sense). +# 3.1 needs a workaround. +if py31: + from io import TextIOWrapper + class NCTextIOWrapper(TextIOWrapper): + def close(self): pass # Keep wrapped buffer open. + +# File uploads (which are implemented as empty FiledStorage instances...) +# have a negative truth value. That makes no sense, here is a fix. +class FieldStorage(cgi.FieldStorage): + def __nonzero__(self): return bool(self.list or self.file) + if py3k: __bool__ = __nonzero__ + +# A bug in functools causes it to break if the wrapper is an instance method +def update_wrapper(wrapper, wrapped, *a, **ka): + try: functools.update_wrapper(wrapper, wrapped, *a, **ka) + except AttributeError: pass + + + +# These helpers are used at module level and need to be defined first. +# And yes, I know PEP-8, but sometimes a lower-case classname makes more sense. + +def depr(message): + warnings.warn(message, DeprecationWarning, stacklevel=3) + +def makelist(data): # This is just to handy + if isinstance(data, (tuple, list, set, dict)): return list(data) + elif data: return [data] + else: return [] + + +class DictProperty(object): + ''' Property that maps to a key in a local dict-like attribute. 
''' + def __init__(self, attr, key=None, read_only=False): + self.attr, self.key, self.read_only = attr, key, read_only + + def __call__(self, func): + functools.update_wrapper(self, func, updated=[]) + self.getter, self.key = func, self.key or func.__name__ + return self + + def __get__(self, obj, cls): + if obj is None: return self + key, storage = self.key, getattr(obj, self.attr) + if key not in storage: storage[key] = self.getter(obj) + return storage[key] + + def __set__(self, obj, value): + if self.read_only: raise AttributeError("Read-Only property.") + getattr(obj, self.attr)[self.key] = value + + def __delete__(self, obj): + if self.read_only: raise AttributeError("Read-Only property.") + del getattr(obj, self.attr)[self.key] + + +class cached_property(object): + ''' A property that is only computed once per instance and then replaces + itself with an ordinary attribute. Deleting the attribute resets the + property. ''' + + def __init__(self, func): + self.func = func + + def __get__(self, obj, cls): + if obj is None: return self + value = obj.__dict__[self.func.__name__] = self.func(obj) + return value + + +class lazy_attribute(object): + ''' A property that caches itself to the class object. ''' + def __init__(self, func): + functools.update_wrapper(self, func, updated=[]) + self.getter = func + + def __get__(self, obj, cls): + value = self.getter(cls) + setattr(cls, self.__name__, value) + return value + + + + + + +############################################################################### +# Exceptions and Events ######################################################## +############################################################################### + + +class BottleException(Exception): + """ A base class for exceptions used by bottle. """ + pass + + + + + + +############################################################################### +# Routing ###################################################################### +############################################################################### + + +class RouteError(BottleException): + """ This is a base class for all routing related exceptions """ + + +class RouteReset(BottleException): + """ If raised by a plugin or request handler, the route is reset and all + plugins are re-applied. """ + +class RouterUnknownModeError(RouteError): pass + + +class RouteSyntaxError(RouteError): + """ The route parser found something not supported by this router """ + + +class RouteBuildError(RouteError): + """ The route could not been built """ + + +class Router(object): + ''' A Router is an ordered collection of route->target pairs. It is used to + efficiently match WSGI requests against a number of routes and return + the first target that satisfies the request. The target may be anything, + usually a string, ID or callable object. A route consists of a path-rule + and a HTTP method. + + The path-rule is either a static path (e.g. `/contact`) or a dynamic + path that contains wildcards (e.g. `/wiki/`). The wildcard syntax + and details on the matching order are described in docs:`routing`. + ''' + + default_pattern = '[^/]+' + default_filter = 're' + #: Sorry for the mess. It works. Trust me. 
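Worth noting here: cached_property above is a non-data descriptor, so the first access stores the computed value in the instance __dict__ and shadows the descriptor from then on; Route.call further down relies on exactly this to apply plugins only once per route. A minimal sketch of that behavior (the Demo class is invented):

    class Demo(object):
        @cached_property
        def answer(self):
            print('computed once')
            return 42

    d = Demo()
    d.answer      # prints 'computed once', then returns 42
    d.answer      # served from d.__dict__; the getter is not called again
    del d.answer  # removing the cached value re-arms the property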
+ rule_syntax = re.compile('(\\\\*)'\ + '(?:(?::([a-zA-Z_][a-zA-Z_0-9]*)?()(?:#(.*?)#)?)'\ + '|(?:<([a-zA-Z_][a-zA-Z_0-9]*)?(?::([a-zA-Z_]*)'\ + '(?::((?:\\\\.|[^\\\\>]+)+)?)?)?>))') + + def __init__(self, strict=False): + self.rules = {} # A {rule: Rule} mapping + self.builder = {} # A rule/name->build_info mapping + self.static = {} # Cache for static routes: {path: {method: target}} + self.dynamic = [] # Cache for dynamic routes. See _compile() + #: If true, static routes are no longer checked first. + self.strict_order = strict + self.filters = {'re': self.re_filter, 'int': self.int_filter, + 'float': self.float_filter, 'path': self.path_filter} + + def re_filter(self, conf): + return conf or self.default_pattern, None, None + + def int_filter(self, conf): + return r'-?\d+', int, lambda x: str(int(x)) + + def float_filter(self, conf): + return r'-?[\d.]+', float, lambda x: str(float(x)) + + def path_filter(self, conf): + return r'.+?', None, None + + def add_filter(self, name, func): + ''' Add a filter. The provided function is called with the configuration + string as parameter and must return a (regexp, to_python, to_url) tuple. + The first element is a string, the last two are callables or None. ''' + self.filters[name] = func + + def parse_rule(self, rule): + ''' Parses a rule into a (name, filter, conf) token stream. If mode is + None, name contains a static rule part. ''' + offset, prefix = 0, '' + for match in self.rule_syntax.finditer(rule): + prefix += rule[offset:match.start()] + g = match.groups() + if len(g[0])%2: # Escaped wildcard + prefix += match.group(0)[len(g[0]):] + offset = match.end() + continue + if prefix: yield prefix, None, None + name, filtr, conf = g[1:4] if not g[2] is None else g[4:7] + if not filtr: filtr = self.default_filter + yield name, filtr, conf or None + offset, prefix = match.end(), '' + if offset <= len(rule) or prefix: + yield prefix+rule[offset:], None, None + + def add(self, rule, method, target, name=None): + ''' Add a new route or replace the target for an existing route. ''' + if rule in self.rules: + self.rules[rule][method] = target + if name: self.builder[name] = self.builder[rule] + return + + target = self.rules[rule] = {method: target} + + # Build pattern and other structures for dynamic routes + anons = 0 # Number of anonymous wildcards + pattern = '' # Regular expression pattern + filters = [] # Lists of wildcard input filters + builder = [] # Data structure for the URL builder + is_static = True + for key, mode, conf in self.parse_rule(rule): + if mode: + is_static = False + mask, in_filter, out_filter = self.filters[mode](conf) + if key: + pattern += '(?P<%s>%s)' % (key, mask) + else: + pattern += '(?:%s)' % mask + key = 'anon%d' % anons; anons += 1 + if in_filter: filters.append((key, in_filter)) + builder.append((key, out_filter or str)) + elif key: + pattern += re.escape(key) + builder.append((None, key)) + self.builder[rule] = builder + if name: self.builder[name] = builder + + if is_static and not self.strict_order: + self.static[self.build(rule)] = target + return + + def fpat_sub(m): + return m.group(0) if len(m.group(1)) % 2 else m.group(1) + '(?:' + flat_pattern = re.sub(r'(\\*)(\(\?P<[^>]*>|\((?!\?))', fpat_sub, pattern) + + try: + re_match = re.compile('^(%s)$' % pattern).match + except re.error: + raise RouteSyntaxError("Could not add Route: %s (%s)" % (rule, _e())) + + def match(path): + """ Return an url-argument dictionary. 
""" + url_args = re_match(path).groupdict() + for name, wildcard_filter in filters: + try: + url_args[name] = wildcard_filter(url_args[name]) + except ValueError: + raise HTTPError(400, 'Path has wrong format.') + return url_args + + try: + combined = '%s|(^%s$)' % (self.dynamic[-1][0].pattern, flat_pattern) + self.dynamic[-1] = (re.compile(combined), self.dynamic[-1][1]) + self.dynamic[-1][1].append((match, target)) + except (AssertionError, IndexError): # AssertionError: Too many groups + self.dynamic.append((re.compile('(^%s$)' % flat_pattern), + [(match, target)])) + return match + + def build(self, _name, *anons, **query): + ''' Build an URL by filling the wildcards in a rule. ''' + builder = self.builder.get(_name) + if not builder: raise RouteBuildError("No route with that name.", _name) + try: + for i, value in enumerate(anons): query['anon%d'%i] = value + url = ''.join([f(query.pop(n)) if n else f for (n,f) in builder]) + return url if not query else url+'?'+urlencode(query) + except KeyError: + raise RouteBuildError('Missing URL argument: %r' % _e().args[0]) + + def match(self, environ): + ''' Return a (target, url_agrs) tuple or raise HTTPError(400/404/405). ''' + path, targets, urlargs = environ['PATH_INFO'] or '/', None, {} + if path in self.static: + targets = self.static[path] + else: + for combined, rules in self.dynamic: + match = combined.match(path) + if not match: continue + getargs, targets = rules[match.lastindex - 1] + urlargs = getargs(path) if getargs else {} + break + + if not targets: + raise HTTPError(404, "Not found: " + repr(environ['PATH_INFO'])) + method = environ['REQUEST_METHOD'].upper() + if method in targets: + return targets[method], urlargs + if method == 'HEAD' and 'GET' in targets: + return targets['GET'], urlargs + if 'ANY' in targets: + return targets['ANY'], urlargs + allowed = [verb for verb in targets if verb != 'ANY'] + if 'GET' in allowed and 'HEAD' not in allowed: + allowed.append('HEAD') + raise HTTPError(405, "Method not allowed.", Allow=",".join(allowed)) + + +class Route(object): + ''' This class wraps a route callback along with route specific metadata and + configuration and applies Plugins on demand. It is also responsible for + turing an URL path rule into a regular expression usable by the Router. + ''' + + def __init__(self, app, rule, method, callback, name=None, + plugins=None, skiplist=None, **config): + #: The application this route is installed to. + self.app = app + #: The path-rule string (e.g. ``/wiki/:page``). + self.rule = rule + #: The HTTP method as a string (e.g. ``GET``). + self.method = method + #: The original callback with no plugins applied. Useful for introspection. + self.callback = callback + #: The name of the route (if specified) or ``None``. + self.name = name or None + #: A list of route-specific plugins (see :meth:`Bottle.route`). + self.plugins = plugins or [] + #: A list of plugins to not apply to this route (see :meth:`Bottle.route`). + self.skiplist = skiplist or [] + #: Additional keyword arguments passed to the :meth:`Bottle.route` + #: decorator are stored in this dictionary. Used for route-specific + #: plugin configuration and meta-data. + self.config = ConfigDict(config) + + def __call__(self, *a, **ka): + depr("Some APIs changed to return Route() instances instead of"\ + " callables. Make sure to use the Route.call method and not to"\ + " call Route instances directly.") + return self.call(*a, **ka) + + @cached_property + def call(self): + ''' The route callback with all plugins applied. 
This property is + created on demand and then cached to speed up subsequent requests.''' + return self._make_callback() + + def reset(self): + ''' Forget any cached values. The next time :attr:`call` is accessed, + all plugins are re-applied. ''' + self.__dict__.pop('call', None) + + def prepare(self): + ''' Do all on-demand work immediately (useful for debugging).''' + self.call + + @property + def _context(self): + depr('Switch to Plugin API v2 and access the Route object directly.') + return dict(rule=self.rule, method=self.method, callback=self.callback, + name=self.name, app=self.app, config=self.config, + apply=self.plugins, skip=self.skiplist) + + def all_plugins(self): + ''' Yield all Plugins affecting this route. ''' + unique = set() + for p in reversed(self.app.plugins + self.plugins): + if True in self.skiplist: break + name = getattr(p, 'name', False) + if name and (name in self.skiplist or name in unique): continue + if p in self.skiplist or type(p) in self.skiplist: continue + if name: unique.add(name) + yield p + + def _make_callback(self): + callback = self.callback + for plugin in self.all_plugins(): + try: + if hasattr(plugin, 'apply'): + api = getattr(plugin, 'api', 1) + context = self if api > 1 else self._context + callback = plugin.apply(callback, context) + else: + callback = plugin(callback) + except RouteReset: # Try again with changed configuration. + return self._make_callback() + if not callback is self.callback: + update_wrapper(callback, self.callback) + return callback + + def __repr__(self): + return '<%s %r %r>' % (self.method, self.rule, self.callback) + + + + + + +############################################################################### +# Application Object ########################################################### +############################################################################### + + +class Bottle(object): + """ Each Bottle object represents a single, distinct web application and + consists of routes, callbacks, plugins, resources and configuration. + Instances are callable WSGI applications. + + :param catchall: If true (default), handle all exceptions. Turn off to + let debugging middleware handle exceptions. + """ + + def __init__(self, catchall=True, autojson=True): + #: If true, most exceptions are caught and returned as :exc:`HTTPError` + self.catchall = catchall + + #: A :class:`ResourceManager` for application files + self.resources = ResourceManager() + + #: A :class:`ConfigDict` for app specific configuration. + self.config = ConfigDict() + self.config.autojson = autojson + + self.routes = [] # List of installed :class:`Route` instances. + self.router = Router() # Maps requests to :class:`Route` instances. + self.error_handler = {} + + # Core plugins + self.plugins = [] # List of installed plugins. + self.hooks = HooksPlugin() + self.install(self.hooks) + if self.config.autojson: + self.install(JSONPlugin()) + self.install(TemplatePlugin()) + + + def mount(self, prefix, app, **options): + ''' Mount an application (:class:`Bottle` or plain WSGI) to a specific + URL prefix. Example:: + + root_app.mount('/admin/', admin_app) + + :param prefix: path prefix or `mount-point`. If it ends in a slash, + that slash is mandatory. + :param app: an instance of :class:`Bottle` or a WSGI application. + + All other parameters are passed to the underlying :meth:`route` call. 
+ ''' + if isinstance(app, basestring): + prefix, app = app, prefix + depr('Parameter order of Bottle.mount() changed.') # 0.10 + + segments = [p for p in prefix.split('/') if p] + if not segments: raise ValueError('Empty path prefix.') + path_depth = len(segments) + + def mountpoint_wrapper(): + try: + request.path_shift(path_depth) + rs = BaseResponse([], 200) + def start_response(status, header): + rs.status = status + for name, value in header: rs.add_header(name, value) + return rs.body.append + body = app(request.environ, start_response) + body = itertools.chain(rs.body, body) + return HTTPResponse(body, rs.status_code, **rs.headers) + finally: + request.path_shift(-path_depth) + + options.setdefault('skip', True) + options.setdefault('method', 'ANY') + options.setdefault('mountpoint', {'prefix': prefix, 'target': app}) + options['callback'] = mountpoint_wrapper + + self.route('/%s/<:re:.*>' % '/'.join(segments), **options) + if not prefix.endswith('/'): + self.route('/' + '/'.join(segments), **options) + + def merge(self, routes): + ''' Merge the routes of another :class:`Bottle` application or a list of + :class:`Route` objects into this application. The routes keep their + 'owner', meaning that the :data:`Route.app` attribute is not + changed. ''' + if isinstance(routes, Bottle): + routes = routes.routes + for route in routes: + self.add_route(route) + + def install(self, plugin): + ''' Add a plugin to the list of plugins and prepare it for being + applied to all routes of this application. A plugin may be a simple + decorator or an object that implements the :class:`Plugin` API. + ''' + if hasattr(plugin, 'setup'): plugin.setup(self) + if not callable(plugin) and not hasattr(plugin, 'apply'): + raise TypeError("Plugins must be callable or implement .apply()") + self.plugins.append(plugin) + self.reset() + return plugin + + def uninstall(self, plugin): + ''' Uninstall plugins. Pass an instance to remove a specific plugin, a type + object to remove all plugins that match that type, a string to remove + all plugins with a matching ``name`` attribute or ``True`` to remove all + plugins. Return the list of removed plugins. ''' + removed, remove = [], plugin + for i, plugin in list(enumerate(self.plugins))[::-1]: + if remove is True or remove is plugin or remove is type(plugin) \ + or getattr(plugin, 'name', True) == remove: + removed.append(plugin) + del self.plugins[i] + if hasattr(plugin, 'close'): plugin.close() + if removed: self.reset() + return removed + + def run(self, **kwargs): + ''' Calls :func:`run` with the same parameters. ''' + run(self, **kwargs) + + def reset(self, route=None): + ''' Reset all routes (force plugins to be re-applied) and clear all + caches. If an ID or route object is given, only that specific route + is affected. ''' + if route is None: routes = self.routes + elif isinstance(route, Route): routes = [route] + else: routes = [self.routes[route]] + for route in routes: route.reset() + if DEBUG: + for route in routes: route.prepare() + self.hooks.trigger('app_reset') + + def close(self): + ''' Close the application and all installed plugins. ''' + for plugin in self.plugins: + if hasattr(plugin, 'close'): plugin.close() + self.stopped = True + + def match(self, environ): + """ Search for a matching route and return a (:class:`Route` , urlargs) + tuple. The second value is a dictionary with parameters extracted + from the URL. 
Raise :exc:`HTTPError` (404/405) on a non-match.""" + return self.router.match(environ) + + def get_url(self, routename, **kargs): + """ Return a string that matches a named route """ + scriptname = request.environ.get('SCRIPT_NAME', '').strip('/') + '/' + location = self.router.build(routename, **kargs).lstrip('/') + return urljoin(urljoin('/', scriptname), location) + + def add_route(self, route): + ''' Add a route object, but do not change the :data:`Route.app` + attribute.''' + self.routes.append(route) + self.router.add(route.rule, route.method, route, name=route.name) + if DEBUG: route.prepare() + + def route(self, path=None, method='GET', callback=None, name=None, + apply=None, skip=None, **config): + """ A decorator to bind a function to a request URL. Example:: + + @app.route('/hello/:name') + def hello(name): + return 'Hello %s' % name + + The ``:name`` part is a wildcard. See :class:`Router` for syntax + details. + + :param path: Request path or a list of paths to listen to. If no + path is specified, it is automatically generated from the + signature of the function. + :param method: HTTP method (`GET`, `POST`, `PUT`, ...) or a list of + methods to listen to. (default: `GET`) + :param callback: An optional shortcut to avoid the decorator + syntax. ``route(..., callback=func)`` equals ``route(...)(func)`` + :param name: The name for this route. (default: None) + :param apply: A decorator or plugin or a list of plugins. These are + applied to the route callback in addition to installed plugins. + :param skip: A list of plugins, plugin classes or names. Matching + plugins are not installed to this route. ``True`` skips all. + + Any additional keyword arguments are stored as route-specific + configuration and passed to plugins (see :meth:`Plugin.apply`). + """ + if callable(path): path, callback = None, path + plugins = makelist(apply) + skiplist = makelist(skip) + def decorator(callback): + # TODO: Documentation and tests + if isinstance(callback, basestring): callback = load(callback) + for rule in makelist(path) or yieldroutes(callback): + for verb in makelist(method): + verb = verb.upper() + route = Route(self, rule, verb, callback, name=name, + plugins=plugins, skiplist=skiplist, **config) + self.add_route(route) + return callback + return decorator(callback) if callback else decorator + + def get(self, path=None, method='GET', **options): + """ Equals :meth:`route`. """ + return self.route(path, method, **options) + + def post(self, path=None, method='POST', **options): + """ Equals :meth:`route` with a ``POST`` method parameter. """ + return self.route(path, method, **options) + + def put(self, path=None, method='PUT', **options): + """ Equals :meth:`route` with a ``PUT`` method parameter. """ + return self.route(path, method, **options) + + def delete(self, path=None, method='DELETE', **options): + """ Equals :meth:`route` with a ``DELETE`` method parameter. """ + return self.route(path, method, **options) + + def error(self, code=500): + """ Decorator: Register an output handler for a HTTP error code""" + def wrapper(handler): + self.error_handler[int(code)] = handler + return handler + return wrapper + + def hook(self, name): + """ Return a decorator that attaches a callback to a hook. Three hooks + are currently implemented: + + - before_request: Executed once before each request + - after_request: Executed once after each request + - app_reset: Called whenever :meth:`reset` is called. 
+ """ + def wrapper(func): + self.hooks.add(name, func) + return func + return wrapper + + def handle(self, path, method='GET'): + """ (deprecated) Execute the first matching route callback and return + the result. :exc:`HTTPResponse` exceptions are caught and returned. + If :attr:`Bottle.catchall` is true, other exceptions are caught as + well and returned as :exc:`HTTPError` instances (500). + """ + depr("This method will change semantics in 0.10. Try to avoid it.") + if isinstance(path, dict): + return self._handle(path) + return self._handle({'PATH_INFO': path, 'REQUEST_METHOD': method.upper()}) + + def default_error_handler(self, res): + return tob(template(ERROR_PAGE_TEMPLATE, e=res)) + + def _handle(self, environ): + try: + environ['bottle.app'] = self + request.bind(environ) + response.bind() + route, args = self.router.match(environ) + environ['route.handle'] = route + environ['bottle.route'] = route + environ['route.url_args'] = args + return route.call(**args) + except HTTPResponse: + return _e() + except RouteReset: + route.reset() + return self._handle(environ) + except (KeyboardInterrupt, SystemExit, MemoryError): + raise + except Exception: + if not self.catchall: raise + stacktrace = format_exc() + environ['wsgi.errors'].write(stacktrace) + return HTTPError(500, "Internal Server Error", _e(), stacktrace) + + def _cast(self, out, peek=None): + """ Try to convert the parameter into something WSGI compatible and set + correct HTTP headers when possible. + Support: False, str, unicode, dict, HTTPResponse, HTTPError, file-like, + iterable of strings and iterable of unicodes + """ + + # Empty output is done here + if not out: + if 'Content-Length' not in response: + response['Content-Length'] = 0 + return [] + # Join lists of byte or unicode strings. Mixed lists are NOT supported + if isinstance(out, (tuple, list))\ + and isinstance(out[0], (bytes, unicode)): + out = out[0][0:0].join(out) # b'abc'[0:0] -> b'' + # Encode unicode strings + if isinstance(out, unicode): + out = out.encode(response.charset) + # Byte Strings are just returned + if isinstance(out, bytes): + if 'Content-Length' not in response: + response['Content-Length'] = len(out) + return [out] + # HTTPError or HTTPException (recursive, because they may wrap anything) + # TODO: Handle these explicitly in handle() or make them iterable. + if isinstance(out, HTTPError): + out.apply(response) + out = self.error_handler.get(out.status_code, self.default_error_handler)(out) + return self._cast(out) + if isinstance(out, HTTPResponse): + out.apply(response) + return self._cast(out.body) + + # File-like objects. + if hasattr(out, 'read'): + if 'wsgi.file_wrapper' in request.environ: + return request.environ['wsgi.file_wrapper'](out) + elif hasattr(out, 'close') or not hasattr(out, '__iter__'): + return WSGIFileWrapper(out) + + # Handle Iterables. We peek into them to detect their inner type. + try: + out = iter(out) + first = next(out) + while not first: + first = next(out) + except StopIteration: + return self._cast('') + except HTTPResponse: + first = _e() + except (KeyboardInterrupt, SystemExit, MemoryError): + raise + except Exception: + if not self.catchall: raise + first = HTTPError(500, 'Unhandled exception', _e(), format_exc()) + + # These are the inner types allowed in iterator or generator objects. 
+ if isinstance(first, HTTPResponse): + return self._cast(first) + if isinstance(first, bytes): + return itertools.chain([first], out) + if isinstance(first, unicode): + return imap(lambda x: x.encode(response.charset), + itertools.chain([first], out)) + return self._cast(HTTPError(500, 'Unsupported response type: %s'\ + % type(first))) + + def wsgi(self, environ, start_response): + """ The bottle WSGI-interface. """ + try: + out = self._cast(self._handle(environ)) + # rfc2616 section 4.3 + if response._status_code in (100, 101, 204, 304)\ + or environ['REQUEST_METHOD'] == 'HEAD': + if hasattr(out, 'close'): out.close() + out = [] + start_response(response._status_line, response.headerlist) + return out + except (KeyboardInterrupt, SystemExit, MemoryError): + raise + except Exception: + if not self.catchall: raise + err = '
<h1>Critical error while processing request: %s</h1>' \ + % html_escape(environ.get('PATH_INFO', '/')) + if DEBUG: + err += '<h2>Error:</h2>\n<pre>\n%s\n</pre>\n' \ + '<h2>Traceback:</h2>\n<pre>\n%s\n</pre>
\n' \ + % (html_escape(repr(_e())), html_escape(format_exc())) + environ['wsgi.errors'].write(err) + headers = [('Content-Type', 'text/html; charset=UTF-8')] + start_response('500 INTERNAL SERVER ERROR', headers) + return [tob(err)] + + def __call__(self, environ, start_response): + ''' Each instance of :class:'Bottle' is a WSGI application. ''' + return self.wsgi(environ, start_response) + + + + + + +############################################################################### +# HTTP and WSGI Tools ########################################################## +############################################################################### + + +class BaseRequest(object): + """ A wrapper for WSGI environment dictionaries that adds a lot of + convenient access methods and properties. Most of them are read-only. + + Adding new attributes to a request actually adds them to the environ + dictionary (as 'bottle.request.ext.'). This is the recommended + way to store and access request-specific data. + """ + + __slots__ = ('environ') + + #: Maximum size of memory buffer for :attr:`body` in bytes. + MEMFILE_MAX = 102400 + #: Maximum number pr GET or POST parameters per request + MAX_PARAMS = 100 + + def __init__(self, environ=None): + """ Wrap a WSGI environ dictionary. """ + #: The wrapped WSGI environ dictionary. This is the only real attribute. + #: All other attributes actually are read-only properties. + self.environ = {} if environ is None else environ + self.environ['bottle.request'] = self + + @DictProperty('environ', 'bottle.app', read_only=True) + def app(self): + ''' Bottle application handling this request. ''' + raise RuntimeError('This request is not connected to an application.') + + @property + def path(self): + ''' The value of ``PATH_INFO`` with exactly one prefixed slash (to fix + broken clients and avoid the "empty path" edge case). ''' + return '/' + self.environ.get('PATH_INFO','').lstrip('/') + + @property + def method(self): + ''' The ``REQUEST_METHOD`` value as an uppercase string. ''' + return self.environ.get('REQUEST_METHOD', 'GET').upper() + + @DictProperty('environ', 'bottle.request.headers', read_only=True) + def headers(self): + ''' A :class:`WSGIHeaderDict` that provides case-insensitive access to + HTTP request headers. ''' + return WSGIHeaderDict(self.environ) + + def get_header(self, name, default=None): + ''' Return the value of a request header, or a given default value. ''' + return self.headers.get(name, default) + + @DictProperty('environ', 'bottle.request.cookies', read_only=True) + def cookies(self): + """ Cookies parsed into a :class:`FormsDict`. Signed cookies are NOT + decoded. Use :meth:`get_cookie` if you expect signed cookies. """ + cookies = SimpleCookie(self.environ.get('HTTP_COOKIE','')) + cookies = list(cookies.values())[:self.MAX_PARAMS] + return FormsDict((c.key, c.value) for c in cookies) + + def get_cookie(self, key, default=None, secret=None): + """ Return the content of a cookie. To read a `Signed Cookie`, the + `secret` must match the one used to create the cookie (see + :meth:`BaseResponse.set_cookie`). If anything goes wrong (missing + cookie or wrong signature), return a default value. """ + value = self.cookies.get(key) + if secret and value: + dec = cookie_decode(value, secret) # (key, value) tuple or None + return dec[1] if dec and dec[0] == key else default + return value or default + + @DictProperty('environ', 'bottle.request.query', read_only=True) + def query(self): + ''' The :attr:`query_string` parsed into a :class:`FormsDict`. 
These + values are sometimes called "URL arguments" or "GET parameters", but + not to be confused with "URL wildcards" as they are provided by the + :class:`Router`. ''' + get = self.environ['bottle.get'] = FormsDict() + pairs = _parse_qsl(self.environ.get('QUERY_STRING', '')) + for key, value in pairs[:self.MAX_PARAMS]: + get[key] = value + return get + + @DictProperty('environ', 'bottle.request.forms', read_only=True) + def forms(self): + """ Form values parsed from an `url-encoded` or `multipart/form-data` + encoded POST or PUT request body. The result is retuned as a + :class:`FormsDict`. All keys and values are strings. File uploads + are stored separately in :attr:`files`. """ + forms = FormsDict() + for name, item in self.POST.allitems(): + if not hasattr(item, 'filename'): + forms[name] = item + return forms + + @DictProperty('environ', 'bottle.request.params', read_only=True) + def params(self): + """ A :class:`FormsDict` with the combined values of :attr:`query` and + :attr:`forms`. File uploads are stored in :attr:`files`. """ + params = FormsDict() + for key, value in self.query.allitems(): + params[key] = value + for key, value in self.forms.allitems(): + params[key] = value + return params + + @DictProperty('environ', 'bottle.request.files', read_only=True) + def files(self): + """ File uploads parsed from an `url-encoded` or `multipart/form-data` + encoded POST or PUT request body. The values are instances of + :class:`cgi.FieldStorage`. The most important attributes are: + + filename + The filename, if specified; otherwise None; this is the client + side filename, *not* the file name on which it is stored (that's + a temporary file you don't deal with) + file + The file(-like) object from which you can read the data. + value + The value as a *string*; for file uploads, this transparently + reads the file every time you request the value. Do not do this + on big files. + """ + files = FormsDict() + for name, item in self.POST.allitems(): + if hasattr(item, 'filename'): + files[name] = item + return files + + @DictProperty('environ', 'bottle.request.json', read_only=True) + def json(self): + ''' If the ``Content-Type`` header is ``application/json``, this + property holds the parsed content of the request body. Only requests + smaller than :attr:`MEMFILE_MAX` are processed to avoid memory + exhaustion. ''' + if 'application/json' in self.environ.get('CONTENT_TYPE', '') \ + and 0 < self.content_length < self.MEMFILE_MAX: + return json_loads(self.body.read(self.MEMFILE_MAX)) + return None + + @DictProperty('environ', 'bottle.request.body', read_only=True) + def _body(self): + maxread = max(0, self.content_length) + stream = self.environ['wsgi.input'] + body = BytesIO() if maxread < self.MEMFILE_MAX else TemporaryFile(mode='w+b') + while maxread > 0: + part = stream.read(min(maxread, self.MEMFILE_MAX)) + if not part: break + body.write(part) + maxread -= len(part) + self.environ['wsgi.input'] = body + body.seek(0) + return body + + @property + def body(self): + """ The HTTP request body as a seek-able file-like object. Depending on + :attr:`MEMFILE_MAX`, this is either a temporary file or a + :class:`io.BytesIO` instance. Accessing this property for the first + time reads and replaces the ``wsgi.input`` environ variable. + Subsequent accesses just do a `seek(0)` on the file object. """ + self._body.seek(0) + return self._body + + #: An alias for :attr:`query`. 
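In practice, the query property above (and the GET alias defined next, which is the same object) is read like any FormsDict; a short illustrative sketch, with the route and parameter names invented:

    @app.route('/search')
    def search():
        q = request.query.q                    # attribute access; u'' if absent
        page = request.query.get('page', '1')  # dict-style access with default
        return 'query=%s, page=%s' % (q, page)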
+ GET = query + + @DictProperty('environ', 'bottle.request.post', read_only=True) + def POST(self): + """ The values of :attr:`forms` and :attr:`files` combined into a single + :class:`FormsDict`. Values are either strings (form values) or + instances of :class:`cgi.FieldStorage` (file uploads). + """ + post = FormsDict() + # We default to application/x-www-form-urlencoded for everything that + # is not multipart and take the fast path (also: 3.1 workaround) + if not self.content_type.startswith('multipart/'): + maxlen = max(0, min(self.content_length, self.MEMFILE_MAX)) + pairs = _parse_qsl(tonat(self.body.read(maxlen), 'latin1')) + for key, value in pairs[:self.MAX_PARAMS]: + post[key] = value + return post + + safe_env = {'QUERY_STRING':''} # Build a safe environment for cgi + for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'): + if key in self.environ: safe_env[key] = self.environ[key] + args = dict(fp=self.body, environ=safe_env, keep_blank_values=True) + if py31: + args['fp'] = NCTextIOWrapper(args['fp'], encoding='ISO-8859-1', + newline='\n') + elif py3k: + args['encoding'] = 'ISO-8859-1' + data = FieldStorage(**args) + for item in (data.list or [])[:self.MAX_PARAMS]: + post[item.name] = item if item.filename else item.value + return post + + @property + def COOKIES(self): + ''' Alias for :attr:`cookies` (deprecated). ''' + depr('BaseRequest.COOKIES was renamed to BaseRequest.cookies (lowercase).') + return self.cookies + + @property + def url(self): + """ The full request URI including hostname and scheme. If your app + lives behind a reverse proxy or load balancer and you get confusing + results, make sure that the ``X-Forwarded-Host`` header is set + correctly. """ + return self.urlparts.geturl() + + @DictProperty('environ', 'bottle.request.urlparts', read_only=True) + def urlparts(self): + ''' The :attr:`url` string as an :class:`urlparse.SplitResult` tuple. + The tuple contains (scheme, host, path, query_string and fragment), + but the fragment is always empty because it is not visible to the + server. ''' + env = self.environ + http = env.get('HTTP_X_FORWARDED_PROTO') or env.get('wsgi.url_scheme', 'http') + host = env.get('HTTP_X_FORWARDED_HOST') or env.get('HTTP_HOST') + if not host: + # HTTP 1.1 requires a Host-header. This is for HTTP/1.0 clients. + host = env.get('SERVER_NAME', '127.0.0.1') + port = env.get('SERVER_PORT') + if port and port != ('80' if http == 'http' else '443'): + host += ':' + port + path = urlquote(self.fullpath) + return UrlSplitResult(http, host, path, env.get('QUERY_STRING'), '') + + @property + def fullpath(self): + """ Request path including :attr:`script_name` (if present). """ + return urljoin(self.script_name, self.path.lstrip('/')) + + @property + def query_string(self): + """ The raw :attr:`query` part of the URL (everything in between ``?`` + and ``#``) as a string. """ + return self.environ.get('QUERY_STRING', '') + + @property + def script_name(self): + ''' The initial portion of the URL's `path` that was removed by a higher + level (server or routing middleware) before the application was + called. This script path is returned with leading and tailing + slashes. ''' + script_name = self.environ.get('SCRIPT_NAME', '').strip('/') + return '/' + script_name + '/' if script_name else '/' + + def path_shift(self, shift=1): + ''' Shift path segments from :attr:`path` to :attr:`script_name` and + vice versa. + + :param shift: The number of path segments to shift. May be negative + to change the shift direction. 
(default: 1) + ''' + script = self.environ.get('SCRIPT_NAME','/') + self['SCRIPT_NAME'], self['PATH_INFO'] = path_shift(script, self.path, shift) + + @property + def content_length(self): + ''' The request body length as an integer. The client is responsible to + set this header. Otherwise, the real length of the body is unknown + and -1 is returned. In this case, :attr:`body` will be empty. ''' + return int(self.environ.get('CONTENT_LENGTH') or -1) + + @property + def content_type(self): + ''' The Content-Type header as a lowercase-string (default: empty). ''' + return self.environ.get('CONTENT_TYPE', '').lower() + + @property + def is_xhr(self): + ''' True if the request was triggered by a XMLHttpRequest. This only + works with JavaScript libraries that support the `X-Requested-With` + header (most of the popular libraries do). ''' + requested_with = self.environ.get('HTTP_X_REQUESTED_WITH','') + return requested_with.lower() == 'xmlhttprequest' + + @property + def is_ajax(self): + ''' Alias for :attr:`is_xhr`. "Ajax" is not the right term. ''' + return self.is_xhr + + @property + def auth(self): + """ HTTP authentication data as a (user, password) tuple. This + implementation currently supports basic (not digest) authentication + only. If the authentication happened at a higher level (e.g. in the + front web-server or a middleware), the password field is None, but + the user field is looked up from the ``REMOTE_USER`` environ + variable. On any errors, None is returned. """ + basic = parse_auth(self.environ.get('HTTP_AUTHORIZATION','')) + if basic: return basic + ruser = self.environ.get('REMOTE_USER') + if ruser: return (ruser, None) + return None + + @property + def remote_route(self): + """ A list of all IPs that were involved in this request, starting with + the client IP and followed by zero or more proxies. This does only + work if all proxies support the ```X-Forwarded-For`` header. Note + that this information can be forged by malicious clients. """ + proxy = self.environ.get('HTTP_X_FORWARDED_FOR') + if proxy: return [ip.strip() for ip in proxy.split(',')] + remote = self.environ.get('REMOTE_ADDR') + return [remote] if remote else [] + + @property + def remote_addr(self): + """ The client IP as a string. Note that this information can be forged + by malicious clients. """ + route = self.remote_route + return route[0] if route else None + + def copy(self): + """ Return a new :class:`Request` with a shallow :attr:`environ` copy. """ + return Request(self.environ.copy()) + + def get(self, value, default=None): return self.environ.get(value, default) + def __getitem__(self, key): return self.environ[key] + def __delitem__(self, key): self[key] = ""; del(self.environ[key]) + def __iter__(self): return iter(self.environ) + def __len__(self): return len(self.environ) + def keys(self): return self.environ.keys() + def __setitem__(self, key, value): + """ Change an environ value and clear all caches that depend on it. 
""" + + if self.environ.get('bottle.request.readonly'): + raise KeyError('The environ dictionary is read-only.') + + self.environ[key] = value + todelete = () + + if key == 'wsgi.input': + todelete = ('body', 'forms', 'files', 'params', 'post', 'json') + elif key == 'QUERY_STRING': + todelete = ('query', 'params') + elif key.startswith('HTTP_'): + todelete = ('headers', 'cookies') + + for key in todelete: + self.environ.pop('bottle.request.'+key, None) + + def __repr__(self): + return '<%s: %s %s>' % (self.__class__.__name__, self.method, self.url) + + def __getattr__(self, name): + ''' Search in self.environ for additional user defined attributes. ''' + try: + var = self.environ['bottle.request.ext.%s'%name] + return var.__get__(self) if hasattr(var, '__get__') else var + except KeyError: + raise AttributeError('Attribute %r not defined.' % name) + + def __setattr__(self, name, value): + if name == 'environ': return object.__setattr__(self, name, value) + self.environ['bottle.request.ext.%s'%name] = value + + + + +def _hkey(s): + return s.title().replace('_','-') + + +class HeaderProperty(object): + def __init__(self, name, reader=None, writer=str, default=''): + self.name, self.default = name, default + self.reader, self.writer = reader, writer + self.__doc__ = 'Current value of the %r header.' % name.title() + + def __get__(self, obj, cls): + if obj is None: return self + value = obj.headers.get(self.name, self.default) + return self.reader(value) if self.reader else value + + def __set__(self, obj, value): + obj.headers[self.name] = self.writer(value) + + def __delete__(self, obj): + del obj.headers[self.name] + + +class BaseResponse(object): + """ Storage class for a response body as well as headers and cookies. + + This class does support dict-like case-insensitive item-access to + headers, but is NOT a dict. Most notably, iterating over a response + yields parts of the body and not the headers. + """ + + default_status = 200 + default_content_type = 'text/html; charset=UTF-8' + + # Header blacklist for specific response codes + # (rfc2616 section 10.2.3 and 10.3.5) + bad_headers = { + 204: set(('Content-Type',)), + 304: set(('Allow', 'Content-Encoding', 'Content-Language', + 'Content-Length', 'Content-Range', 'Content-Type', + 'Content-Md5', 'Last-Modified'))} + + def __init__(self, body='', status=None, **headers): + self._cookies = None + self._headers = {'Content-Type': [self.default_content_type]} + self.body = body + self.status = status or self.default_status + if headers: + for name, value in headers.items(): + self[name] = value + + def copy(self): + ''' Returns a copy of self. ''' + copy = Response() + copy.status = self.status + copy._headers = dict((k, v[:]) for (k, v) in self._headers.items()) + return copy + + def __iter__(self): + return iter(self.body) + + def close(self): + if hasattr(self.body, 'close'): + self.body.close() + + @property + def status_line(self): + ''' The HTTP status line as a string (e.g. ``404 Not Found``).''' + return self._status_line + + @property + def status_code(self): + ''' The HTTP status code as an integer (e.g. 
404).''' + return self._status_code + + def _set_status(self, status): + if isinstance(status, int): + code, status = status, _HTTP_STATUS_LINES.get(status) + elif ' ' in status: + status = status.strip() + code = int(status.split()[0]) + else: + raise ValueError('String status line without a reason phrase.') + if not 100 <= code <= 999: raise ValueError('Status code out of range.') + self._status_code = code + self._status_line = str(status or ('%d Unknown' % code)) + + def _get_status(self): + return self._status_line + + status = property(_get_status, _set_status, None, + ''' A writeable property to change the HTTP response status. It accepts + either a numeric code (100-999) or a string with a custom reason + phrase (e.g. "404 Brain not found"). Both :data:`status_line` and + :data:`status_code` are updated accordingly. The return value is + always a status string. ''') + del _get_status, _set_status + + @property + def headers(self): + ''' An instance of :class:`HeaderDict`, a case-insensitive dict-like + view on the response headers. ''' + hdict = HeaderDict() + hdict.dict = self._headers + return hdict + + def __contains__(self, name): return _hkey(name) in self._headers + def __delitem__(self, name): del self._headers[_hkey(name)] + def __getitem__(self, name): return self._headers[_hkey(name)][-1] + def __setitem__(self, name, value): self._headers[_hkey(name)] = [str(value)] + + def get_header(self, name, default=None): + ''' Return the value of a previously defined header. If there is no + header with that name, return a default value. ''' + return self._headers.get(_hkey(name), [default])[-1] + + def set_header(self, name, value): + ''' Create a new response header, replacing any previously defined + headers with the same name. ''' + self._headers[_hkey(name)] = [str(value)] + + def add_header(self, name, value): + ''' Add an additional response header, not removing duplicates. ''' + self._headers.setdefault(_hkey(name), []).append(str(value)) + + def iter_headers(self): + ''' Yield (header, value) tuples, skipping headers that are not + allowed with the current response status code. ''' + return self.headerlist + + def wsgiheader(self): + depr('The wsgiheader method is deprecated. See headerlist.') #0.10 + return self.headerlist + + @property + def headerlist(self): + ''' WSGI conform list of (header, value) tuples. ''' + out = [] + headers = self._headers.items() + if self._status_code in self.bad_headers: + bad_headers = self.bad_headers[self._status_code] + headers = [h for h in headers if h[0] not in bad_headers] + out += [(name, val) for name, vals in headers for val in vals] + if self._cookies: + for c in self._cookies.values(): + out.append(('Set-Cookie', c.OutputString())) + return out + + content_type = HeaderProperty('Content-Type') + content_length = HeaderProperty('Content-Length', reader=int) + + @property + def charset(self): + """ Return the charset specified in the content-type header (default: utf8). """ + if 'charset=' in self.content_type: + return self.content_type.split('charset=')[-1].split(';')[0].strip() + return 'UTF-8' + + @property + def COOKIES(self): + """ A dict-like SimpleCookie instance. This should not be used directly. + See :meth:`set_cookie`. """ + depr('The COOKIES dict is deprecated. Use `set_cookie()` instead.') # 0.10 + if not self._cookies: + self._cookies = SimpleCookie() + return self._cookies + + def set_cookie(self, name, value, secret=None, **options): + ''' Create a new cookie or replace an old one. 
If the `secret` parameter is + set, create a `Signed Cookie` (described below). + + :param name: the name of the cookie. + :param value: the value of the cookie. + :param secret: a signature key required for signed cookies. + + Additionally, this method accepts all RFC 2109 attributes that are + supported by :class:`cookie.Morsel`, including: + + :param max_age: maximum age in seconds. (default: None) + :param expires: a datetime object or UNIX timestamp. (default: None) + :param domain: the domain that is allowed to read the cookie. + (default: current domain) + :param path: limits the cookie to a given path (default: current path) + :param secure: limit the cookie to HTTPS connections (default: off). + :param httponly: prevents client-side javascript to read this cookie + (default: off, requires Python 2.6 or newer). + + If neither `expires` nor `max_age` is set (default), the cookie will + expire at the end of the browser session (as soon as the browser + window is closed). + + Signed cookies may store any pickle-able object and are + cryptographically signed to prevent manipulation. Keep in mind that + cookies are limited to 4kb in most browsers. + + Warning: Signed cookies are not encrypted (the client can still see + the content) and not copy-protected (the client can restore an old + cookie). The main intention is to make pickling and unpickling + save, not to store secret information at client side. + ''' + if not self._cookies: + self._cookies = SimpleCookie() + + if secret: + value = touni(cookie_encode((name, value), secret)) + elif not isinstance(value, basestring): + raise TypeError('Secret key missing for non-string Cookie.') + + if len(value) > 4096: raise ValueError('Cookie value to long.') + self._cookies[name] = value + + for key, value in options.items(): + if key == 'max_age': + if isinstance(value, timedelta): + value = value.seconds + value.days * 24 * 3600 + if key == 'expires': + if isinstance(value, (datedate, datetime)): + value = value.timetuple() + elif isinstance(value, (int, float)): + value = time.gmtime(value) + value = time.strftime("%a, %d %b %Y %H:%M:%S GMT", value) + self._cookies[name][key.replace('_', '-')] = value + + def delete_cookie(self, key, **kwargs): + ''' Delete a cookie. Be sure to use the same `domain` and `path` + settings as used to create the cookie. ''' + kwargs['max_age'] = -1 + kwargs['expires'] = 0 + self.set_cookie(key, '', **kwargs) + + def __repr__(self): + out = '' + for name, value in self.headerlist: + out += '%s: %s\n' % (name.title(), value.strip()) + return out + +#: Thread-local storage for :class:`LocalRequest` and :class:`LocalResponse` +#: attributes. +_lctx = threading.local() + +def local_property(name): + def fget(self): + try: + return getattr(_lctx, name) + except AttributeError: + raise RuntimeError("Request context not initialized.") + def fset(self, value): setattr(_lctx, name, value) + def fdel(self): delattr(_lctx, name) + return property(fget, fset, fdel, + 'Thread-local property stored in :data:`_lctx.%s`' % name) + + +class LocalRequest(BaseRequest): + ''' A thread-local subclass of :class:`BaseRequest` with a different + set of attribues for each thread. There is usually only one global + instance of this class (:data:`request`). If accessed during a + request/response cycle, this instance always refers to the *current* + request (even on a multithreaded server). 
''' + bind = BaseRequest.__init__ + environ = local_property('request_environ') + + +class LocalResponse(BaseResponse): + ''' A thread-local subclass of :class:`BaseResponse` with a different + set of attribues for each thread. There is usually only one global + instance of this class (:data:`response`). Its attributes are used + to build the HTTP response at the end of the request/response cycle. + ''' + bind = BaseResponse.__init__ + _status_line = local_property('response_status_line') + _status_code = local_property('response_status_code') + _cookies = local_property('response_cookies') + _headers = local_property('response_headers') + body = local_property('response_body') + +Request = BaseRequest +Response = BaseResponse + +class HTTPResponse(Response, BottleException): + def __init__(self, body='', status=None, header=None, **headers): + if header or 'output' in headers: + depr('Call signature changed (for the better)') + if header: headers.update(header) + if 'output' in headers: body = headers.pop('output') + super(HTTPResponse, self).__init__(body, status, **headers) + + def apply(self, response): + response._status_code = self._status_code + response._status_line = self._status_line + response._headers = self._headers + response._cookies = self._cookies + response.body = self.body + + def _output(self, value=None): + depr('Use HTTPResponse.body instead of HTTPResponse.output') + if value is None: return self.body + self.body = value + + output = property(_output, _output, doc='Alias for .body') + +class HTTPError(HTTPResponse): + default_status = 500 + def __init__(self, status=None, body=None, exception=None, traceback=None, header=None, **headers): + self.exception = exception + self.traceback = traceback + super(HTTPError, self).__init__(body, status, header, **headers) + + + + + +############################################################################### +# Plugins ###################################################################### +############################################################################### + +class PluginError(BottleException): pass + +class JSONPlugin(object): + name = 'json' + api = 2 + + def __init__(self, json_dumps=json_dumps): + self.json_dumps = json_dumps + + def apply(self, callback, route): + dumps = self.json_dumps + if not dumps: return callback + def wrapper(*a, **ka): + rv = callback(*a, **ka) + if isinstance(rv, dict): + #Attempt to serialize, raises exception on failure + json_response = dumps(rv) + #Set content type only if serialization succesful + response.content_type = 'application/json' + return json_response + return rv + return wrapper + + +class HooksPlugin(object): + name = 'hooks' + api = 2 + + _names = 'before_request', 'after_request', 'app_reset' + + def __init__(self): + self.hooks = dict((name, []) for name in self._names) + self.app = None + + def _empty(self): + return not (self.hooks['before_request'] or self.hooks['after_request']) + + def setup(self, app): + self.app = app + + def add(self, name, func): + ''' Attach a callback to a hook. ''' + was_empty = self._empty() + self.hooks.setdefault(name, []).append(func) + if self.app and was_empty and not self._empty(): self.app.reset() + + def remove(self, name, func): + ''' Remove a callback from a hook. 
''' + was_empty = self._empty() + if name in self.hooks and func in self.hooks[name]: + self.hooks[name].remove(func) + if self.app and not was_empty and self._empty(): self.app.reset() + + def trigger(self, name, *a, **ka): + ''' Trigger a hook and return a list of results. ''' + hooks = self.hooks[name] + if ka.pop('reversed', False): hooks = hooks[::-1] + return [hook(*a, **ka) for hook in hooks] + + def apply(self, callback, route): + if self._empty(): return callback + def wrapper(*a, **ka): + self.trigger('before_request') + rv = callback(*a, **ka) + self.trigger('after_request', reversed=True) + return rv + return wrapper + + +class TemplatePlugin(object): + ''' This plugin applies the :func:`view` decorator to all routes with a + `template` config parameter. If the parameter is a tuple, the second + element must be a dict with additional options (e.g. `template_engine`) + or default variables for the template. ''' + name = 'template' + api = 2 + + def apply(self, callback, route): + conf = route.config.get('template') + if isinstance(conf, (tuple, list)) and len(conf) == 2: + return view(conf[0], **conf[1])(callback) + elif isinstance(conf, str) and 'template_opts' in route.config: + depr('The `template_opts` parameter is deprecated.') #0.9 + return view(conf, **route.config['template_opts'])(callback) + elif isinstance(conf, str): + return view(conf)(callback) + else: + return callback + + +#: Not a plugin, but part of the plugin API. TODO: Find a better place. +class _ImportRedirect(object): + def __init__(self, name, impmask): + ''' Create a virtual package that redirects imports (see PEP 302). ''' + self.name = name + self.impmask = impmask + self.module = sys.modules.setdefault(name, imp.new_module(name)) + self.module.__dict__.update({'__file__': __file__, '__path__': [], + '__all__': [], '__loader__': self}) + sys.meta_path.append(self) + + def find_module(self, fullname, path=None): + if '.' not in fullname: return + packname, modname = fullname.rsplit('.', 1) + if packname != self.name: return + return self + + def load_module(self, fullname): + if fullname in sys.modules: return sys.modules[fullname] + packname, modname = fullname.rsplit('.', 1) + realname = self.impmask % modname + __import__(realname) + module = sys.modules[fullname] = sys.modules[realname] + setattr(self.module, modname, module) + module.__loader__ = self + return module + + + + + + +############################################################################### +# Common Utilities ############################################################# +############################################################################### + + +class MultiDict(DictMixin): + """ This dict stores multiple values per key, but behaves exactly like a + normal dict in that it returns only the newest value for any given key. + There are special methods available to access the full list of values. 
+ """ + + def __init__(self, *a, **k): + self.dict = dict((k, [v]) for (k, v) in dict(*a, **k).items()) + + def __len__(self): return len(self.dict) + def __iter__(self): return iter(self.dict) + def __contains__(self, key): return key in self.dict + def __delitem__(self, key): del self.dict[key] + def __getitem__(self, key): return self.dict[key][-1] + def __setitem__(self, key, value): self.append(key, value) + def keys(self): return self.dict.keys() + + if py3k: + def values(self): return (v[-1] for v in self.dict.values()) + def items(self): return ((k, v[-1]) for k, v in self.dict.items()) + def allitems(self): + return ((k, v) for k, vl in self.dict.items() for v in vl) + iterkeys = keys + itervalues = values + iteritems = items + iterallitems = allitems + + else: + def values(self): return [v[-1] for v in self.dict.values()] + def items(self): return [(k, v[-1]) for k, v in self.dict.items()] + def iterkeys(self): return self.dict.iterkeys() + def itervalues(self): return (v[-1] for v in self.dict.itervalues()) + def iteritems(self): + return ((k, v[-1]) for k, v in self.dict.iteritems()) + def iterallitems(self): + return ((k, v) for k, vl in self.dict.iteritems() for v in vl) + def allitems(self): + return [(k, v) for k, vl in self.dict.iteritems() for v in vl] + + def get(self, key, default=None, index=-1, type=None): + ''' Return the most recent value for a key. + + :param default: The default value to be returned if the key is not + present or the type conversion fails. + :param index: An index for the list of available values. + :param type: If defined, this callable is used to cast the value + into a specific type. Exception are suppressed and result in + the default value to be returned. + ''' + try: + val = self.dict[key][index] + return type(val) if type else val + except Exception: + pass + return default + + def append(self, key, value): + ''' Add a new value to the list of values for this key. ''' + self.dict.setdefault(key, []).append(value) + + def replace(self, key, value): + ''' Replace the list of values with a single value. ''' + self.dict[key] = [value] + + def getall(self, key): + ''' Return a (possibly empty) list of values for a key. ''' + return self.dict.get(key) or [] + + #: Aliases for WTForms to mimic other multi-dict APIs (Django) + getone = get + getlist = getall + + + +class FormsDict(MultiDict): + ''' This :class:`MultiDict` subclass is used to store request form data. + Additionally to the normal dict-like item access methods (which return + unmodified data as native strings), this container also supports + attribute-like access to its values. Attributes are automatically de- + or recoded to match :attr:`input_encoding` (default: 'utf8'). Missing + attributes default to an empty string. ''' + + #: Encoding used for attribute values. + input_encoding = 'utf8' + #: If true (default), unicode strings are first encoded with `latin1` + #: and then decoded to match :attr:`input_encoding`. + recode_unicode = True + + def _fix(self, s, encoding=None): + if isinstance(s, unicode) and self.recode_unicode: # Python 3 WSGI + s = s.encode('latin1') + if isinstance(s, bytes): # Python 2 WSGI + return s.decode(encoding or self.input_encoding) + return s + + def decode(self, encoding=None): + ''' Returns a copy with all keys and values de- or recoded to match + :attr:`input_encoding`. Some libraries (e.g. WTForms) want a + unicode dictionary. 
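+
+            A sketch of the recoding (assumes a WSGI server that hands over
+            utf8 bytes mis-decoded as latin1):
+
+                d = FormsDict(name=u'T\xc3\xa4st')
+                d.decode()['name']   # -> u'T\xe4st'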
''' + copy = FormsDict() + enc = copy.input_encoding = encoding or self.input_encoding + copy.recode_unicode = False + for key, value in self.allitems(): + copy.append(self._fix(key, enc), self._fix(value, enc)) + return copy + + def getunicode(self, name, default=None, encoding=None): + try: + return self._fix(self[name], encoding) + except (UnicodeError, KeyError): + return default + + def __getattr__(self, name, default=unicode()): + # Without this guard, pickle generates a cryptic TypeError: + if name.startswith('__') and name.endswith('__'): + return super(FormsDict, self).__getattr__(name) + return self.getunicode(name, default=default) + + +class HeaderDict(MultiDict): + """ A case-insensitive version of :class:`MultiDict` that defaults to + replace the old value instead of appending it. """ + + def __init__(self, *a, **ka): + self.dict = {} + if a or ka: self.update(*a, **ka) + + def __contains__(self, key): return _hkey(key) in self.dict + def __delitem__(self, key): del self.dict[_hkey(key)] + def __getitem__(self, key): return self.dict[_hkey(key)][-1] + def __setitem__(self, key, value): self.dict[_hkey(key)] = [str(value)] + def append(self, key, value): + self.dict.setdefault(_hkey(key), []).append(str(value)) + def replace(self, key, value): self.dict[_hkey(key)] = [str(value)] + def getall(self, key): return self.dict.get(_hkey(key)) or [] + def get(self, key, default=None, index=-1): + return MultiDict.get(self, _hkey(key), default, index) + def filter(self, names): + for name in [_hkey(n) for n in names]: + if name in self.dict: + del self.dict[name] + + +class WSGIHeaderDict(DictMixin): + ''' This dict-like class wraps a WSGI environ dict and provides convenient + access to HTTP_* fields. Keys and values are native strings + (2.x bytes or 3.x unicode) and keys are case-insensitive. If the WSGI + environment contains non-native string values, these are de- or encoded + using a lossless 'latin1' character set. + + The API will remain stable even on changes to the relevant PEPs. + Currently PEP 333, 444 and 3333 are supported. (PEP 444 is the only one + that uses non-native strings.) + ''' + #: List of keys that do not have a ``HTTP_`` prefix. + cgikeys = ('CONTENT_TYPE', 'CONTENT_LENGTH') + + def __init__(self, environ): + self.environ = environ + + def _ekey(self, key): + ''' Translate header field name to CGI/WSGI environ key. ''' + key = key.replace('-','_').upper() + if key in self.cgikeys: + return key + return 'HTTP_' + key + + def raw(self, key, default=None): + ''' Return the header value as is (may be bytes or unicode). ''' + return self.environ.get(self._ekey(key), default) + + def __getitem__(self, key): + return tonat(self.environ[self._ekey(key)], 'latin1') + + def __setitem__(self, key, value): + raise TypeError("%s is read-only." % self.__class__) + + def __delitem__(self, key): + raise TypeError("%s is read-only." % self.__class__) + + def __iter__(self): + for key in self.environ: + if key[:5] == 'HTTP_': + yield key[5:].replace('_', '-').title() + elif key in self.cgikeys: + yield key.replace('_', '-').title() + + def keys(self): return [x for x in self] + def __len__(self): return len(self.keys()) + def __contains__(self, key): return self._ekey(key) in self.environ + + +class ConfigDict(dict): + ''' A dict-subclass with some extras: You can access keys like attributes. + Uppercase attributes create new ConfigDicts and act as name-spaces. + Other missing attributes return None. Calling a ConfigDict updates its + values and returns itself. 
+
+        >>> cfg = ConfigDict()
+        >>> cfg.Namespace.value = 5
+        >>> cfg.OtherNamespace(a=1, b=2)
+        >>> cfg
+        {'Namespace': {'value': 5}, 'OtherNamespace': {'a': 1, 'b': 2}}
+    '''
+
+    def __getattr__(self, key):
+        if key not in self and key[0].isupper():
+            self[key] = ConfigDict()
+        return self.get(key)
+
+    def __setattr__(self, key, value):
+        if hasattr(dict, key):
+            raise AttributeError('Read-only attribute.')
+        if key in self and self[key] and isinstance(self[key], ConfigDict):
+            raise AttributeError('Non-empty namespace attribute.')
+        self[key] = value
+
+    def __delattr__(self, key):
+        if key in self: del self[key]
+
+    def __call__(self, *a, **ka):
+        for key, value in dict(*a, **ka).items(): setattr(self, key, value)
+        return self
+
+
+class AppStack(list):
+    """ A stack-like list. Calling it returns the head of the stack. """
+
+    def __call__(self):
+        """ Return the current default application. """
+        return self[-1]
+
+    def push(self, value=None):
+        """ Add a new :class:`Bottle` instance to the stack """
+        if not isinstance(value, Bottle):
+            value = Bottle()
+        self.append(value)
+        return value
+
+
+class WSGIFileWrapper(object):
+
+    def __init__(self, fp, buffer_size=1024*64):
+        self.fp, self.buffer_size = fp, buffer_size
+        for attr in ('fileno', 'close', 'read', 'readlines', 'tell', 'seek'):
+            if hasattr(fp, attr): setattr(self, attr, getattr(fp, attr))
+
+    def __iter__(self):
+        buff, read = self.buffer_size, self.read
+        while True:
+            part = read(buff)
+            if not part: return
+            yield part
+
+
+class ResourceManager(object):
+    ''' This class manages a list of search paths and helps to find and open
+        application-bound resources (files).
+
+        :param base: default value for :meth:`add_path` calls.
+        :param opener: callable used to open resources.
+        :param cachemode: controls which lookups are cached. One of 'all',
+                          'found' or 'none'.
+    '''
+
+    def __init__(self, base='./', opener=open, cachemode='all'):
+        self.opener = opener
+        self.base = base
+        self.cachemode = cachemode
+
+        #: A list of search paths. See :meth:`add_path` for details.
+        self.path = []
+        #: A cache for resolved paths. ``res.cache.clear()`` clears the cache.
+        self.cache = {}
+
+    def add_path(self, path, base=None, index=None, create=False):
+        ''' Add a new path to the list of search paths. Return False if the
+            path does not exist.
+
+            :param path: The new search path. Relative paths are turned into
+                an absolute and normalized form. If the path looks like a file
+                (not ending in `/`), the filename is stripped off.
+            :param base: Path used to absolutize relative search paths.
+                Defaults to :attr:`base` which defaults to ``os.getcwd()``.
+            :param index: Position within the list of search paths. Defaults
+                to last index (appends to the list).
+
+            The `base` parameter makes it easy to reference files installed
+            along with a python module or package::
+
+                res.add_path('./resources/', __file__)
+        '''
+        base = os.path.abspath(os.path.dirname(base or self.base))
+        path = os.path.abspath(os.path.join(base, os.path.dirname(path)))
+        path += os.sep
+        if path in self.path:
+            self.path.remove(path)
+        if create and not os.path.isdir(path):
+            os.makedirs(path)
+        if index is None:
+            self.path.append(path)
+        else:
+            self.path.insert(index, path)
+        self.cache.clear()
+        return os.path.exists(path)
+
+    def __iter__(self):
+        ''' Iterate over all existing files in all registered paths.
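+
+            A sketch (assumes a directory ``./static/`` exists):
+
+                res = ResourceManager()
+                res.add_path('./static/')
+                for fname in res:
+                    print fname   # absolute path of every file found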
+        '''
+        search = self.path[:]
+        while search:
+            path = search.pop()
+            if not os.path.isdir(path): continue
+            for name in os.listdir(path):
+                full = os.path.join(path, name)
+                if os.path.isdir(full): search.append(full)
+                else: yield full
+
+    def lookup(self, name):
+        ''' Search for a resource and return an absolute file path, or `None`.
+
+            The :attr:`path` list is searched in order. The first match is
+            returned. Symlinks are followed. The result is cached to speed up
+            future lookups. '''
+        if name not in self.cache or DEBUG:
+            for path in self.path:
+                fpath = os.path.join(path, name)
+                if os.path.isfile(fpath):
+                    if self.cachemode in ('all', 'found'):
+                        self.cache[name] = fpath
+                    return fpath
+            if self.cachemode == 'all':
+                self.cache[name] = None
+        return self.cache[name]
+
+    def open(self, name, mode='r', *args, **kwargs):
+        ''' Find a resource and return a file object, or raise IOError. '''
+        fname = self.lookup(name)
+        if not fname: raise IOError("Resource %r not found." % name)
+        return self.opener(fname, mode=mode, *args, **kwargs)
+
+
+
+
+
+
+###############################################################################
+# Application Helper ###########################################################
+###############################################################################
+
+
+def abort(code=500, text='Unknown Error: Application stopped.'):
+    """ Aborts execution and causes an HTTP error. """
+    raise HTTPError(code, text)
+
+
+def redirect(url, code=None):
+    """ Aborts execution and causes a 303 or 302 redirect, depending on
+        the HTTP protocol version. """
+    if code is None:
+        code = 303 if request.get('SERVER_PROTOCOL') == "HTTP/1.1" else 302
+    location = urljoin(request.url, url)
+    res = HTTPResponse("", status=code, Location=location)
+    if response._cookies:
+        res._cookies = response._cookies
+    raise res
+
+
+def _file_iter_range(fp, offset, bytes, maxread=1024*1024):
+    ''' Yield chunks from a range in a file. No chunk is bigger than maxread.'''
+    fp.seek(offset)
+    while bytes > 0:
+        part = fp.read(min(bytes, maxread))
+        if not part: break
+        bytes -= len(part)
+        yield part
+
+
+def static_file(filename, root, mimetype='auto', download=False):
+    """ Open a file in a safe way and return :exc:`HTTPResponse` with status
+        code 200, 206, 304, 403, 404 or 416. Set the Content-Type,
+        Content-Encoding, Content-Length and Last-Modified headers. Obey the
+        If-Modified-Since header and HEAD requests.
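+
+        Typical use inside a route callback (a sketch; the root path is just
+        an example):
+
+            @route('/download/<filename>')
+            def download(filename):
+                return static_file(filename, root='/srv/files', download=True)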
+ """ + root = os.path.abspath(root) + os.sep + filename = os.path.abspath(os.path.join(root, filename.strip('/\\'))) + headers = dict() + + if not filename.startswith(root): + return HTTPError(403, "Access denied.") + if not os.path.exists(filename) or not os.path.isfile(filename): + return HTTPError(404, "File does not exist.") + if not os.access(filename, os.R_OK): + return HTTPError(403, "You do not have permission to access this file.") + + if mimetype == 'auto': + mimetype, encoding = mimetypes.guess_type(filename) + if mimetype: headers['Content-Type'] = mimetype + if encoding: headers['Content-Encoding'] = encoding + elif mimetype: + headers['Content-Type'] = mimetype + + if download: + download = os.path.basename(filename if download == True else download) + headers['Content-Disposition'] = 'attachment; filename="%s"' % download + + stats = os.stat(filename) + headers['Content-Length'] = clen = stats.st_size + lm = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime)) + headers['Last-Modified'] = lm + + ims = request.environ.get('HTTP_IF_MODIFIED_SINCE') + if ims: + ims = parse_date(ims.split(";")[0].strip()) + if ims is not None and ims >= int(stats.st_mtime): + headers['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()) + return HTTPResponse(status=304, **headers) + + body = '' if request.method == 'HEAD' else open(filename, 'rb') + + headers["Accept-Ranges"] = "bytes" + ranges = request.environ.get('HTTP_RANGE') + if 'HTTP_RANGE' in request.environ: + ranges = list(parse_range_header(request.environ['HTTP_RANGE'], clen)) + if not ranges: + return HTTPError(416, "Requested Range Not Satisfiable") + offset, end = ranges[0] + headers["Content-Range"] = "bytes %d-%d/%d" % (offset, end-1, clen) + headers["Content-Length"] = str(end-offset) + if body: body = _file_iter_range(body, offset, end-offset) + return HTTPResponse(body, status=206, **headers) + return HTTPResponse(body, **headers) + + + + + + +############################################################################### +# HTTP Utilities and MISC (TODO) ############################################### +############################################################################### + + +def debug(mode=True): + """ Change the debug level. + There is only one debug level supported at the moment.""" + global DEBUG + DEBUG = bool(mode) + + +def parse_date(ims): + """ Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch. """ + try: + ts = email.utils.parsedate_tz(ims) + return time.mktime(ts[:8] + (0,)) - (ts[9] or 0) - time.timezone + except (TypeError, ValueError, IndexError, OverflowError): + return None + + +def parse_auth(header): + """ Parse rfc2617 HTTP authentication header string (basic) and return (user,pass) tuple or None""" + try: + method, data = header.split(None, 1) + if method.lower() == 'basic': + user, pwd = touni(base64.b64decode(tob(data))).split(':',1) + return user, pwd + except (KeyError, ValueError): + return None + +def parse_range_header(header, maxlen=0): + ''' Yield (start, end) ranges parsed from a HTTP Range header. Skip + unsatisfiable ranges. 
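+        For example (an illustrative sketch):
+
+            list(parse_range_header('bytes=0-99,-100', 1000))
+            # -> [(0, 100), (900, 1000)]
+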
+        The end index is non-inclusive.'''
+    if not header or header[:6] != 'bytes=': return
+    ranges = [r.split('-', 1) for r in header[6:].split(',') if '-' in r]
+    for start, end in ranges:
+        try:
+            if not start:  # bytes=-100    -> last 100 bytes
+                start, end = max(0, maxlen-int(end)), maxlen
+            elif not end:  # bytes=100-    -> all but the first 100 bytes
+                start, end = int(start), maxlen
+            else:          # bytes=100-200 -> bytes 100-200 (inclusive)
+                start, end = int(start), min(int(end)+1, maxlen)
+            if 0 <= start < end <= maxlen:
+                yield start, end
+        except ValueError:
+            pass
+
+def _parse_qsl(qs):
+    r = []
+    for pair in qs.replace(';','&').split('&'):
+        if not pair: continue
+        nv = pair.split('=', 1)
+        if len(nv) != 2: nv.append('')
+        key = urlunquote(nv[0].replace('+', ' '))
+        value = urlunquote(nv[1].replace('+', ' '))
+        r.append((key, value))
+    return r
+
+def _lscmp(a, b):
+    ''' Compares two strings in a cryptographically safe way:
+        Runtime is not affected by length of common prefix. '''
+    return not sum(0 if x==y else 1 for x, y in zip(a, b)) and len(a) == len(b)
+
+
+def cookie_encode(data, key):
+    ''' Encode and sign a pickle-able object. Return a (byte) string '''
+    msg = base64.b64encode(pickle.dumps(data, -1))
+    sig = base64.b64encode(hmac.new(tob(key), msg).digest())
+    return tob('!') + sig + tob('?') + msg
+
+
+def cookie_decode(data, key):
+    ''' Verify and decode an encoded string. Return an object or None.'''
+    data = tob(data)
+    if cookie_is_encoded(data):
+        sig, msg = data.split(tob('?'), 1)
+        if _lscmp(sig[1:], base64.b64encode(hmac.new(tob(key), msg).digest())):
+            return pickle.loads(base64.b64decode(msg))
+    return None
+
+
+def cookie_is_encoded(data):
+    ''' Return True if the argument looks like an encoded cookie.'''
+    return bool(data.startswith(tob('!')) and tob('?') in data)
+
+
+def html_escape(string):
+    ''' Escape HTML special characters ``&<>`` and quotes ``'"``. '''
+    return string.replace('&','&amp;').replace('<','&lt;').replace('>','&gt;')\
+                 .replace('"','&quot;').replace("'",'&#039;')
+
+
+def html_quote(string):
+    ''' Escape and quote a string to be used as an HTTP attribute.'''
+    return '"%s"' % html_escape(string).replace('\n','&#10;')\
+                    .replace('\r','&#13;').replace('\t','&#9;')
+
+
+def yieldroutes(func):
+    """ Return a generator for routes that match the signature (name, args)
+    of the func parameter. This may yield more than one route if the function
+    takes optional keyword arguments. The output is best described by example::
+
+        a()         -> '/a'
+        b(x, y)     -> '/b/:x/:y'
+        c(x, y=5)   -> '/c/:x' and '/c/:x/:y'
+        d(x=5, y=6) -> '/d' and '/d/:x' and '/d/:x/:y'
+    """
+    import inspect # Expensive module. Only import if necessary.
+    path = '/' + func.__name__.replace('__','/').lstrip('/')
+    spec = inspect.getargspec(func)
+    argc = len(spec[0]) - len(spec[3] or [])
+    path += ('/:%s' * argc) % tuple(spec[0][:argc])
+    yield path
+    for arg in spec[0][argc:]:
+        path += '/:%s' % arg
+        yield path
+
+
+def path_shift(script_name, path_info, shift=1):
+    ''' Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa.
+
+        :return: The modified paths.
+        :param script_name: The SCRIPT_NAME path.
+        :param path_info: The PATH_INFO path.
+        :param shift: The number of path fragments to shift. May be negative to
+            change the shift direction.
(default: 1) + ''' + if shift == 0: return script_name, path_info + pathlist = path_info.strip('/').split('/') + scriptlist = script_name.strip('/').split('/') + if pathlist and pathlist[0] == '': pathlist = [] + if scriptlist and scriptlist[0] == '': scriptlist = [] + if shift > 0 and shift <= len(pathlist): + moved = pathlist[:shift] + scriptlist = scriptlist + moved + pathlist = pathlist[shift:] + elif shift < 0 and shift >= -len(scriptlist): + moved = scriptlist[shift:] + pathlist = moved + pathlist + scriptlist = scriptlist[:shift] + else: + empty = 'SCRIPT_NAME' if shift < 0 else 'PATH_INFO' + raise AssertionError("Cannot shift. Nothing left from %s" % empty) + new_script_name = '/' + '/'.join(scriptlist) + new_path_info = '/' + '/'.join(pathlist) + if path_info.endswith('/') and pathlist: new_path_info += '/' + return new_script_name, new_path_info + + +def validate(**vkargs): + """ + Validates and manipulates keyword arguments by user defined callables. + Handles ValueError and missing arguments by raising HTTPError(403). + """ + depr('Use route wildcard filters instead.') + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kargs): + for key, value in vkargs.items(): + if key not in kargs: + abort(403, 'Missing parameter: %s' % key) + try: + kargs[key] = value(kargs[key]) + except ValueError: + abort(403, 'Wrong parameter format for: %s' % key) + return func(*args, **kargs) + return wrapper + return decorator + + +def auth_basic(check, realm="private", text="Access denied"): + ''' Callback decorator to require HTTP auth (basic). + TODO: Add route(check_auth=...) parameter. ''' + def decorator(func): + def wrapper(*a, **ka): + user, password = request.auth or (None, None) + if user is None or not check(user, password): + response.headers['WWW-Authenticate'] = 'Basic realm="%s"' % realm + return HTTPError(401, text) + return func(*a, **ka) + return wrapper + return decorator + + +# Shortcuts for common Bottle methods. +# They all refer to the current default application. + +def make_default_app_wrapper(name): + ''' Return a callable that relays calls to the current default app. 
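+
+        For example, the module-level ``route`` defined below simply forwards
+        to ``app().route`` (illustrative):
+
+            @route('/hello')
+            def hello():
+                return 'Hello World!'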
''' + @functools.wraps(getattr(Bottle, name)) + def wrapper(*a, **ka): + return getattr(app(), name)(*a, **ka) + return wrapper + +route = make_default_app_wrapper('route') +get = make_default_app_wrapper('get') +post = make_default_app_wrapper('post') +put = make_default_app_wrapper('put') +delete = make_default_app_wrapper('delete') +error = make_default_app_wrapper('error') +mount = make_default_app_wrapper('mount') +hook = make_default_app_wrapper('hook') +install = make_default_app_wrapper('install') +uninstall = make_default_app_wrapper('uninstall') +url = make_default_app_wrapper('get_url') + + + + + + + +############################################################################### +# Server Adapter ############################################################### +############################################################################### + + +class ServerAdapter(object): + quiet = False + def __init__(self, host='127.0.0.1', port=8080, **config): + self.options = config + self.host = host + self.port = int(port) + + def run(self, handler): # pragma: no cover + pass + + def __repr__(self): + args = ', '.join(['%s=%s'%(k,repr(v)) for k, v in self.options.items()]) + return "%s(%s)" % (self.__class__.__name__, args) + + +class CGIServer(ServerAdapter): + quiet = True + def run(self, handler): # pragma: no cover + from wsgiref.handlers import CGIHandler + def fixed_environ(environ, start_response): + environ.setdefault('PATH_INFO', '') + return handler(environ, start_response) + CGIHandler().run(fixed_environ) + + +class FlupFCGIServer(ServerAdapter): + def run(self, handler): # pragma: no cover + import flup.server.fcgi + self.options.setdefault('bindAddress', (self.host, self.port)) + flup.server.fcgi.WSGIServer(handler, **self.options).run() + + +class WSGIRefServer(ServerAdapter): + def run(self, handler): # pragma: no cover + from wsgiref.simple_server import make_server, WSGIRequestHandler + if self.quiet: + class QuietHandler(WSGIRequestHandler): + def log_request(*args, **kw): pass + self.options['handler_class'] = QuietHandler + srv = make_server(self.host, self.port, handler, **self.options) + srv.serve_forever() + + +class CherryPyServer(ServerAdapter): + def run(self, handler): # pragma: no cover + from cherrypy import wsgiserver + server = wsgiserver.CherryPyWSGIServer((self.host, self.port), handler) + try: + server.start() + finally: + server.stop() + + +class WaitressServer(ServerAdapter): + def run(self, handler): + from waitress import serve + serve(handler, host=self.host, port=self.port) + + +class PasteServer(ServerAdapter): + def run(self, handler): # pragma: no cover + from paste import httpserver + if not self.quiet: + from paste.translogger import TransLogger + handler = TransLogger(handler) + httpserver.serve(handler, host=self.host, port=str(self.port), + **self.options) + + +class MeinheldServer(ServerAdapter): + def run(self, handler): + from meinheld import server + server.listen((self.host, self.port)) + server.run(handler) + + +class FapwsServer(ServerAdapter): + """ Extremely fast webserver using libev. See http://www.fapws.org/ """ + def run(self, handler): # pragma: no cover + import fapws._evwsgi as evwsgi + from fapws import base, config + port = self.port + if float(config.SERVER_IDENT[-2:]) > 0.4: + # fapws3 silently changed its API in 0.5 + port = str(port) + evwsgi.start(self.host, port) + # fapws3 never releases the GIL. Complain upstream. I tried. No luck. 
+        if 'BOTTLE_CHILD' in os.environ and not self.quiet:
+            _stderr("WARNING: Auto-reloading does not work with Fapws3.\n")
+            _stderr("         (Fapws3 breaks python thread support)\n")
+        evwsgi.set_base_module(base)
+        def app(environ, start_response):
+            environ['wsgi.multiprocess'] = False
+            return handler(environ, start_response)
+        evwsgi.wsgi_cb(('', app))
+        evwsgi.run()
+
+
+class TornadoServer(ServerAdapter):
+    """ The super hyped asynchronous server by facebook. Untested. """
+    def run(self, handler): # pragma: no cover
+        import tornado.wsgi, tornado.httpserver, tornado.ioloop
+        container = tornado.wsgi.WSGIContainer(handler)
+        server = tornado.httpserver.HTTPServer(container)
+        server.listen(port=self.port)
+        tornado.ioloop.IOLoop.instance().start()
+
+
+class AppEngineServer(ServerAdapter):
+    """ Adapter for Google App Engine. """
+    quiet = True
+    def run(self, handler):
+        from google.appengine.ext.webapp import util
+        # A main() function in the handler script enables 'App Caching'.
+        # Let's make sure it is there. This _really_ improves performance.
+        module = sys.modules.get('__main__')
+        if module and not hasattr(module, 'main'):
+            module.main = lambda: util.run_wsgi_app(handler)
+        util.run_wsgi_app(handler)
+
+
+class TwistedServer(ServerAdapter):
+    """ Untested. """
+    def run(self, handler):
+        from twisted.web import server, wsgi
+        from twisted.python.threadpool import ThreadPool
+        from twisted.internet import reactor
+        thread_pool = ThreadPool()
+        thread_pool.start()
+        reactor.addSystemEventTrigger('after', 'shutdown', thread_pool.stop)
+        factory = server.Site(wsgi.WSGIResource(reactor, thread_pool, handler))
+        reactor.listenTCP(self.port, factory, interface=self.host)
+        reactor.run()
+
+
+class DieselServer(ServerAdapter):
+    """ Untested. """
+    def run(self, handler):
+        from diesel.protocols.wsgi import WSGIApplication
+        app = WSGIApplication(handler, port=self.port)
+        app.run()
+
+
+class GeventServer(ServerAdapter):
+    """ Untested. Options:
+
+        * `fast` (default: False) uses libevent's http server, but has some
+          issues: No streaming, no pipelining, no SSL.
+    """
+    def run(self, handler):
+        from gevent import wsgi, pywsgi, local
+        if not isinstance(_lctx, local.local):
+            msg = "Bottle requires gevent.monkey.patch_all() (before import)"
+            raise RuntimeError(msg)
+        if not self.options.get('fast'): wsgi = pywsgi
+        log = None if self.quiet else 'default'
+        wsgi.WSGIServer((self.host, self.port), handler, log=log).serve_forever()
+
+
+class GunicornServer(ServerAdapter):
+    """ Untested. See http://gunicorn.org/configure.html for options. """
+    def run(self, handler):
+        from gunicorn.app.base import Application
+
+        config = {'bind': "%s:%d" % (self.host, int(self.port))}
+        config.update(self.options)
+
+        class GunicornApplication(Application):
+            def init(self, parser, opts, args):
+                return config
+
+            def load(self):
+                return handler
+
+        GunicornApplication().run()
+
+
+class EventletServer(ServerAdapter):
+    """ Untested """
+    def run(self, handler):
+        from eventlet import wsgi, listen
+        try:
+            wsgi.server(listen((self.host, self.port)), handler,
+                        log_output=(not self.quiet))
+        except TypeError:
+            # Fallback, if we have an old version of eventlet
+            wsgi.server(listen((self.host, self.port)), handler)
+
+
+class RocketServer(ServerAdapter):
+    """ Untested.
""" + def run(self, handler): + from rocket import Rocket + server = Rocket((self.host, self.port), 'wsgi', { 'wsgi_app' : handler }) + server.start() + + +class BjoernServer(ServerAdapter): + """ Fast server written in C: https://github.com/jonashaag/bjoern """ + def run(self, handler): + from bjoern import run + run(handler, self.host, self.port) + + +class AutoServer(ServerAdapter): + """ Untested. """ + adapters = [WaitressServer, PasteServer, TwistedServer, CherryPyServer, WSGIRefServer] + def run(self, handler): + for sa in self.adapters: + try: + return sa(self.host, self.port, **self.options).run(handler) + except ImportError: + pass + +server_names = { + 'cgi': CGIServer, + 'flup': FlupFCGIServer, + 'wsgiref': WSGIRefServer, + 'waitress': WaitressServer, + 'cherrypy': CherryPyServer, + 'paste': PasteServer, + 'fapws3': FapwsServer, + 'tornado': TornadoServer, + 'gae': AppEngineServer, + 'twisted': TwistedServer, + 'diesel': DieselServer, + 'meinheld': MeinheldServer, + 'gunicorn': GunicornServer, + 'eventlet': EventletServer, + 'gevent': GeventServer, + 'rocket': RocketServer, + 'bjoern' : BjoernServer, + 'auto': AutoServer, +} + + + + + + +############################################################################### +# Application Control ########################################################## +############################################################################### + + +def load(target, **namespace): + """ Import a module or fetch an object from a module. + + * ``package.module`` returns `module` as a module object. + * ``pack.mod:name`` returns the module variable `name` from `pack.mod`. + * ``pack.mod:func()`` calls `pack.mod.func()` and returns the result. + + The last form accepts not only function calls, but any type of + expression. Keyword arguments passed to this function are available as + local variables. Example: ``import_string('re:compile(x)', x='[a-z]')`` + """ + module, target = target.split(":", 1) if ':' in target else (target, None) + if module not in sys.modules: __import__(module) + if not target: return sys.modules[module] + if target.isalnum(): return getattr(sys.modules[module], target) + package_name = module.split('.')[0] + namespace[package_name] = sys.modules[package_name] + return eval('%s.%s' % (module, target), namespace) + + +def load_app(target): + """ Load a bottle application from a module and make sure that the import + does not affect the current default application, but returns a separate + application object. See :func:`load` for the target parameter. """ + global NORUN; NORUN, nr_old = True, NORUN + try: + tmp = default_app.push() # Create a new "default application" + rv = load(target) # Import the target module + return rv if callable(rv) else tmp + finally: + default_app.remove(tmp) # Remove the temporary added default application + NORUN = nr_old + +_debug = debug +def run(app=None, server='wsgiref', host='127.0.0.1', port=8080, + interval=1, reloader=False, quiet=False, plugins=None, + debug=False, **kargs): + """ Start a server instance. This method blocks until the server terminates. + + :param app: WSGI application or target string supported by + :func:`load_app`. (default: :func:`default_app`) + :param server: Server adapter to use. See :data:`server_names` keys + for valid names or pass a :class:`ServerAdapter` subclass. + (default: `wsgiref`) + :param host: Server address to bind to. Pass ``0.0.0.0`` to listens on + all interfaces including the external one. (default: 127.0.0.1) + :param port: Server port to bind to. 
+            Values below 1024 require root privileges. (default: 8080)
+        :param reloader: Start auto-reloading server? (default: False)
+        :param interval: Auto-reloader interval in seconds (default: 1)
+        :param quiet: Suppress output to stdout and stderr? (default: False)
+        :param options: Options passed to the server adapter.
+     """
+    if NORUN: return
+    if reloader and not os.environ.get('BOTTLE_CHILD'):
+        try:
+            lockfile = None
+            fd, lockfile = tempfile.mkstemp(prefix='bottle.', suffix='.lock')
+            os.close(fd) # We only need this file to exist. We never write to it
+            while os.path.exists(lockfile):
+                args = [sys.executable] + sys.argv
+                environ = os.environ.copy()
+                environ['BOTTLE_CHILD'] = 'true'
+                environ['BOTTLE_LOCKFILE'] = lockfile
+                p = subprocess.Popen(args, env=environ)
+                while p.poll() is None: # Busy wait...
+                    os.utime(lockfile, None) # I am alive!
+                    time.sleep(interval)
+                if p.poll() != 3:
+                    if os.path.exists(lockfile): os.unlink(lockfile)
+                    sys.exit(p.poll())
+        except KeyboardInterrupt:
+            pass
+        finally:
+            if os.path.exists(lockfile):
+                os.unlink(lockfile)
+        return
+
+    try:
+        _debug(debug)
+        app = app or default_app()
+        if isinstance(app, basestring):
+            app = load_app(app)
+        if not callable(app):
+            raise ValueError("Application is not callable: %r" % app)
+
+        for plugin in plugins or []:
+            app.install(plugin)
+
+        if server in server_names:
+            server = server_names.get(server)
+        if isinstance(server, basestring):
+            server = load(server)
+        if isinstance(server, type):
+            server = server(host=host, port=port, **kargs)
+        if not isinstance(server, ServerAdapter):
+            raise ValueError("Unknown or unsupported server: %r" % server)
+
+        server.quiet = server.quiet or quiet
+        if not server.quiet:
+            _stderr("Bottle v%s server starting up (using %s)...\n" % (__version__, repr(server)))
+            _stderr("Listening on http://%s:%d/\n" % (server.host, server.port))
+            _stderr("Hit Ctrl-C to quit.\n\n")
+
+        if reloader:
+            lockfile = os.environ.get('BOTTLE_LOCKFILE')
+            bgcheck = FileCheckerThread(lockfile, interval)
+            with bgcheck:
+                server.run(app)
+            if bgcheck.status == 'reload':
+                sys.exit(3)
+        else:
+            server.run(app)
+    except KeyboardInterrupt:
+        pass
+    except (SystemExit, MemoryError):
+        raise
+    except:
+        if not reloader: raise
+        if not getattr(server, 'quiet', quiet):
+            print_exc()
+        time.sleep(interval)
+        sys.exit(3)
+
+
+
+class FileCheckerThread(threading.Thread):
+    ''' Interrupt main-thread as soon as a changed module file is detected,
+        the lockfile gets deleted or gets too old.
''' + + def __init__(self, lockfile, interval): + threading.Thread.__init__(self) + self.lockfile, self.interval = lockfile, interval + #: Is one of 'reload', 'error' or 'exit' + self.status = None + + def run(self): + exists = os.path.exists + mtime = lambda path: os.stat(path).st_mtime + files = dict() + + for module in list(sys.modules.values()): + path = getattr(module, '__file__', '') + if path[-4:] in ('.pyo', '.pyc'): path = path[:-1] + if path and exists(path): files[path] = mtime(path) + + while not self.status: + if not exists(self.lockfile)\ + or mtime(self.lockfile) < time.time() - self.interval - 5: + self.status = 'error' + thread.interrupt_main() + for path, lmtime in list(files.items()): + if not exists(path) or mtime(path) > lmtime: + self.status = 'reload' + thread.interrupt_main() + break + time.sleep(self.interval) + + def __enter__(self): + self.start() + + def __exit__(self, exc_type, exc_val, exc_tb): + if not self.status: self.status = 'exit' # silent exit + self.join() + return exc_type is not None and issubclass(exc_type, KeyboardInterrupt) + + + + + +############################################################################### +# Template Adapters ############################################################ +############################################################################### + + +class TemplateError(HTTPError): + def __init__(self, message): + HTTPError.__init__(self, 500, message) + + +class BaseTemplate(object): + """ Base class and minimal API for template adapters """ + extensions = ['tpl','html','thtml','stpl'] + settings = {} #used in prepare() + defaults = {} #used in render() + + def __init__(self, source=None, name=None, lookup=[], encoding='utf8', **settings): + """ Create a new template. + If the source parameter (str or buffer) is missing, the name argument + is used to guess a template filename. Subclasses can assume that + self.source and/or self.filename are set. Both are strings. + The lookup, encoding and settings parameters are stored as instance + variables. + The lookup parameter stores a list containing directory paths. + The encoding parameter should be used to decode byte strings or files. + The settings parameter contains a dict for engine-specific settings. + """ + self.name = name + self.source = source.read() if hasattr(source, 'read') else source + self.filename = source.filename if hasattr(source, 'filename') else None + self.lookup = [os.path.abspath(x) for x in lookup] + self.encoding = encoding + self.settings = self.settings.copy() # Copy from class variable + self.settings.update(settings) # Apply + if not self.source and self.name: + self.filename = self.search(self.name, self.lookup) + if not self.filename: + raise TemplateError('Template %s not found.' % repr(name)) + if not self.source and not self.filename: + raise TemplateError('No template specified.') + self.prepare(**self.settings) + + @classmethod + def search(cls, name, lookup=[]): + """ Search name in all directories specified in lookup. + First without, then with common extensions. Return first hit. 
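+
+            For example (a sketch), ``search('index', ['./views/'])`` returns
+            the absolute path of ``./views/index.tpl`` if that file exists.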
""" + if not lookup: + depr('The template lookup path list should not be empty.') + lookup = ['.'] + + if os.path.isabs(name) and os.path.isfile(name): + depr('Absolute template path names are deprecated.') + return os.path.abspath(name) + + for spath in lookup: + spath = os.path.abspath(spath) + os.sep + fname = os.path.abspath(os.path.join(spath, name)) + if not fname.startswith(spath): continue + if os.path.isfile(fname): return fname + for ext in cls.extensions: + if os.path.isfile('%s.%s' % (fname, ext)): + return '%s.%s' % (fname, ext) + + @classmethod + def global_config(cls, key, *args): + ''' This reads or sets the global settings stored in class.settings. ''' + if args: + cls.settings = cls.settings.copy() # Make settings local to class + cls.settings[key] = args[0] + else: + return cls.settings[key] + + def prepare(self, **options): + """ Run preparations (parsing, caching, ...). + It should be possible to call this again to refresh a template or to + update settings. + """ + raise NotImplementedError + + def render(self, *args, **kwargs): + """ Render the template with the specified local variables and return + a single byte or unicode string. If it is a byte string, the encoding + must match self.encoding. This method must be thread-safe! + Local variables may be provided in dictionaries (*args) + or directly, as keywords (**kwargs). + """ + raise NotImplementedError + + +class MakoTemplate(BaseTemplate): + def prepare(self, **options): + from mako.template import Template + from mako.lookup import TemplateLookup + options.update({'input_encoding':self.encoding}) + options.setdefault('format_exceptions', bool(DEBUG)) + lookup = TemplateLookup(directories=self.lookup, **options) + if self.source: + self.tpl = Template(self.source, lookup=lookup, **options) + else: + self.tpl = Template(uri=self.name, filename=self.filename, lookup=lookup, **options) + + def render(self, *args, **kwargs): + for dictarg in args: kwargs.update(dictarg) + _defaults = self.defaults.copy() + _defaults.update(kwargs) + return self.tpl.render(**_defaults) + + +class CheetahTemplate(BaseTemplate): + def prepare(self, **options): + from Cheetah.Template import Template + self.context = threading.local() + self.context.vars = {} + options['searchList'] = [self.context.vars] + if self.source: + self.tpl = Template(source=self.source, **options) + else: + self.tpl = Template(file=self.filename, **options) + + def render(self, *args, **kwargs): + for dictarg in args: kwargs.update(dictarg) + self.context.vars.update(self.defaults) + self.context.vars.update(kwargs) + out = str(self.tpl) + self.context.vars.clear() + return out + + +class Jinja2Template(BaseTemplate): + def prepare(self, filters=None, tests=None, **kwargs): + from jinja2 import Environment, FunctionLoader + if 'prefix' in kwargs: # TODO: to be removed after a while + raise RuntimeError('The keyword argument `prefix` has been removed. 
' + 'Use the full jinja2 environment name line_statement_prefix instead.') + self.env = Environment(loader=FunctionLoader(self.loader), **kwargs) + if filters: self.env.filters.update(filters) + if tests: self.env.tests.update(tests) + if self.source: + self.tpl = self.env.from_string(self.source) + else: + self.tpl = self.env.get_template(self.filename) + + def render(self, *args, **kwargs): + for dictarg in args: kwargs.update(dictarg) + _defaults = self.defaults.copy() + _defaults.update(kwargs) + return self.tpl.render(**_defaults) + + def loader(self, name): + fname = self.search(name, self.lookup) + if not fname: return + with open(fname, "rb") as f: + return f.read().decode(self.encoding) + + +class SimpleTALTemplate(BaseTemplate): + ''' Deprecated, do not use. ''' + def prepare(self, **options): + depr('The SimpleTAL template handler is deprecated'\ + ' and will be removed in 0.12') + from simpletal import simpleTAL + if self.source: + self.tpl = simpleTAL.compileHTMLTemplate(self.source) + else: + with open(self.filename, 'rb') as fp: + self.tpl = simpleTAL.compileHTMLTemplate(tonat(fp.read())) + + def render(self, *args, **kwargs): + from simpletal import simpleTALES + for dictarg in args: kwargs.update(dictarg) + context = simpleTALES.Context() + for k,v in self.defaults.items(): + context.addGlobal(k, v) + for k,v in kwargs.items(): + context.addGlobal(k, v) + output = StringIO() + self.tpl.expand(context, output) + return output.getvalue() + + +class SimpleTemplate(BaseTemplate): + blocks = ('if', 'elif', 'else', 'try', 'except', 'finally', 'for', 'while', + 'with', 'def', 'class') + dedent_blocks = ('elif', 'else', 'except', 'finally') + + @lazy_attribute + def re_pytokens(cls): + ''' This matches comments and all kinds of quoted strings but does + NOT match comments (#...) within quoted strings. (trust me) ''' + return re.compile(r''' + (''(?!')|""(?!")|'{6}|"{6} # Empty strings (all 4 types) + |'(?:[^\\']|\\.)+?' # Single quotes (') + |"(?:[^\\"]|\\.)+?" # Double quotes (") + |'{3}(?:[^\\]|\\.|\n)+?'{3} # Triple-quoted strings (') + |"{3}(?:[^\\]|\\.|\n)+?"{3} # Triple-quoted strings (") + |\#.* # Comments + )''', re.VERBOSE) + + def prepare(self, escape_func=html_escape, noescape=False, **kwargs): + self.cache = {} + enc = self.encoding + self._str = lambda x: touni(x, enc) + self._escape = lambda x: escape_func(touni(x, enc)) + if noescape: + self._str, self._escape = self._escape, self._str + + @classmethod + def split_comment(cls, code): + """ Removes comments (#...) from python code. 
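+
+            For example (illustrative):
+
+                split_comment("x = 1 # comment")     # -> 'x = 1 '
+                split_comment("y = '# no comment'")  # unchanged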
""" + if '#' not in code: return code + #: Remove comments only (leave quoted strings as they are) + subf = lambda m: '' if m.group(0)[0]=='#' else m.group(0) + return re.sub(cls.re_pytokens, subf, code) + + @cached_property + def co(self): + return compile(self.code, self.filename or '', 'exec') + + @cached_property + def code(self): + stack = [] # Current Code indentation + lineno = 0 # Current line of code + ptrbuffer = [] # Buffer for printable strings and token tuple instances + codebuffer = [] # Buffer for generated python code + multiline = dedent = oneline = False + template = self.source or open(self.filename, 'rb').read() + + def yield_tokens(line): + for i, part in enumerate(re.split(r'\{\{(.*?)\}\}', line)): + if i % 2: + if part.startswith('!'): yield 'RAW', part[1:] + else: yield 'CMD', part + else: yield 'TXT', part + + def flush(): # Flush the ptrbuffer + if not ptrbuffer: return + cline = '' + for line in ptrbuffer: + for token, value in line: + if token == 'TXT': cline += repr(value) + elif token == 'RAW': cline += '_str(%s)' % value + elif token == 'CMD': cline += '_escape(%s)' % value + cline += ', ' + cline = cline[:-2] + '\\\n' + cline = cline[:-2] + if cline[:-1].endswith('\\\\\\\\\\n'): + cline = cline[:-7] + cline[-1] # 'nobr\\\\\n' --> 'nobr' + cline = '_printlist([' + cline + '])' + del ptrbuffer[:] # Do this before calling code() again + code(cline) + + def code(stmt): + for line in stmt.splitlines(): + codebuffer.append(' ' * len(stack) + line.strip()) + + for line in template.splitlines(True): + lineno += 1 + line = touni(line, self.encoding) + sline = line.lstrip() + if lineno <= 2: + m = re.match(r"%\s*#.*coding[:=]\s*([-\w.]+)", sline) + if m: self.encoding = m.group(1) + if m: line = line.replace('coding','coding (removed)') + if sline and sline[0] == '%' and sline[:2] != '%%': + line = line.split('%',1)[1].lstrip() # Full line following the % + cline = self.split_comment(line).strip() + cmd = re.split(r'[^a-zA-Z0-9_]', cline)[0] + flush() # You are actually reading this? 
Good luck, it's a mess :) + if cmd in self.blocks or multiline: + cmd = multiline or cmd + dedent = cmd in self.dedent_blocks # "else:" + if dedent and not oneline and not multiline: + cmd = stack.pop() + code(line) + oneline = not cline.endswith(':') # "if 1: pass" + multiline = cmd if cline.endswith('\\') else False + if not oneline and not multiline: + stack.append(cmd) + elif cmd == 'end' and stack: + code('#end(%s) %s' % (stack.pop(), line.strip()[3:])) + elif cmd == 'include': + p = cline.split(None, 2)[1:] + if len(p) == 2: + code("_=_include(%s, _stdout, %s)" % (repr(p[0]), p[1])) + elif p: + code("_=_include(%s, _stdout)" % repr(p[0])) + else: # Empty %include -> reverse of %rebase + code("_printlist(_base)") + elif cmd == 'rebase': + p = cline.split(None, 2)[1:] + if len(p) == 2: + code("globals()['_rebase']=(%s, dict(%s))" % (repr(p[0]), p[1])) + elif p: + code("globals()['_rebase']=(%s, {})" % repr(p[0])) + else: + code(line) + else: # Line starting with text (not '%') or '%%' (escaped) + if line.strip().startswith('%%'): + line = line.replace('%%', '%', 1) + ptrbuffer.append(yield_tokens(line)) + flush() + return '\n'.join(codebuffer) + '\n' + + def subtemplate(self, _name, _stdout, *args, **kwargs): + for dictarg in args: kwargs.update(dictarg) + if _name not in self.cache: + self.cache[_name] = self.__class__(name=_name, lookup=self.lookup) + return self.cache[_name].execute(_stdout, kwargs) + + def execute(self, _stdout, *args, **kwargs): + for dictarg in args: kwargs.update(dictarg) + env = self.defaults.copy() + env.update({'_stdout': _stdout, '_printlist': _stdout.extend, + '_include': self.subtemplate, '_str': self._str, + '_escape': self._escape, 'get': env.get, + 'setdefault': env.setdefault, 'defined': env.__contains__}) + env.update(kwargs) + eval(self.co, env) + if '_rebase' in env: + subtpl, rargs = env['_rebase'] + rargs['_base'] = _stdout[:] #copy stdout + del _stdout[:] # clear stdout + return self.subtemplate(subtpl,_stdout,rargs) + return env + + def render(self, *args, **kwargs): + """ Render the template using keyword arguments as local variables. """ + for dictarg in args: kwargs.update(dictarg) + stdout = [] + self.execute(stdout, kwargs) + return ''.join(stdout) + + +def template(*args, **kwargs): + ''' + Get a rendered template as a string iterator. + You can use a name, a filename or a template string as first parameter. + Template rendering arguments can be passed as dictionaries + or directly (as keyword arguments). 
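+
+    For example (an illustrative sketch):
+
+        template('Hello {{name}}!', name='World')   # -> u'Hello World!'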
+    '''
+    tpl = args[0] if args else None
+    adapter = kwargs.pop('template_adapter', SimpleTemplate)
+    lookup = kwargs.pop('template_lookup', TEMPLATE_PATH)
+    tplid = (id(lookup), tpl)
+    if tplid not in TEMPLATES or DEBUG:
+        settings = kwargs.pop('template_settings', {})
+        if isinstance(tpl, adapter):
+            TEMPLATES[tplid] = tpl
+            if settings: TEMPLATES[tplid].prepare(**settings)
+        elif "\n" in tpl or "{" in tpl or "%" in tpl or '$' in tpl:
+            TEMPLATES[tplid] = adapter(source=tpl, lookup=lookup, **settings)
+        else:
+            TEMPLATES[tplid] = adapter(name=tpl, lookup=lookup, **settings)
+    if not TEMPLATES[tplid]:
+        abort(500, 'Template (%s) not found' % tpl)
+    for dictarg in args[1:]: kwargs.update(dictarg)
+    return TEMPLATES[tplid].render(kwargs)
+
+mako_template = functools.partial(template, template_adapter=MakoTemplate)
+cheetah_template = functools.partial(template, template_adapter=CheetahTemplate)
+jinja2_template = functools.partial(template, template_adapter=Jinja2Template)
+simpletal_template = functools.partial(template, template_adapter=SimpleTALTemplate)
+
+
+def view(tpl_name, **defaults):
+    ''' Decorator: renders a template for a handler.
+        The handler can control its behavior like this:
+
+          - return a dict of template vars to fill out the template
+          - return something other than a dict and the view decorator will not
+            process the template, but return the handler result as is.
+            This includes returning an HTTPResponse(dict) to get,
+            for instance, JSON with autojson or other castfilters.
+    '''
+    def decorator(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            result = func(*args, **kwargs)
+            if isinstance(result, (dict, DictMixin)):
+                tplvars = defaults.copy()
+                tplvars.update(result)
+                return template(tpl_name, **tplvars)
+            return result
+        return wrapper
+    return decorator
+
+mako_view = functools.partial(view, template_adapter=MakoTemplate)
+cheetah_view = functools.partial(view, template_adapter=CheetahTemplate)
+jinja2_view = functools.partial(view, template_adapter=Jinja2Template)
+simpletal_view = functools.partial(view, template_adapter=SimpleTALTemplate)
+
+
+
+
+
+
+###############################################################################
+# Constants and Globals ########################################################
+###############################################################################
+
+
+TEMPLATE_PATH = ['./', './views/']
+TEMPLATES = {}
+DEBUG = False
+NORUN = False # If set, run() does nothing. Used by load_app()
+
+#: A dict to map HTTP status codes (e.g. 404) to phrases (e.g. 'Not Found')
+HTTP_CODES = httplib.responses
+HTTP_CODES[418] = "I'm a teapot" # RFC 2324
+HTTP_CODES[428] = "Precondition Required"
+HTTP_CODES[429] = "Too Many Requests"
+HTTP_CODES[431] = "Request Header Fields Too Large"
+HTTP_CODES[511] = "Network Authentication Required"
+_HTTP_STATUS_LINES = dict((k, '%d %s'%(k,v)) for (k,v) in HTTP_CODES.items())
+
+#: The default template used for error pages. Override with @error()
+ERROR_PAGE_TEMPLATE = """
+%%try:
+    %%from %s import DEBUG, HTTP_CODES, request, touni
+    <!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML 2.0//EN">
+    <html>
+        <head>
+            <title>Error: {{e.status}}</title>
+            <style type="text/css">
+              html {background-color: #eee; font-family: sans;}
+              body {background-color: #fff; border: 1px solid #ddd;
+                    padding: 15px; margin: 15px;}
+              pre {background-color: #eee; border: 1px solid #ddd; padding: 5px;}
+            </style>
+        </head>
+        <body>
+            <h1>Error: {{e.status}}</h1>
+            <p>Sorry, the requested URL <tt>{{repr(request.url)}}</tt>
+               caused an error:</p>
+            <pre>{{e.body}}</pre>
+            %%if DEBUG and e.exception:
+              <h2>Exception:</h2>
+              <pre>{{repr(e.exception)}}</pre>
+            %%end
+            %%if DEBUG and e.traceback:
+              <h2>Traceback:</h2>
+              <pre>{{e.traceback}}</pre>
+            %%end
+        </body>
+    </html>
+%%except ImportError:
+    <b>ImportError:</b> Could not generate the error page. Please add bottle to
+    the import path.
+%%end
+""" % __name__
+
+#: A thread-safe instance of :class:`LocalRequest`. If accessed from within a
+#: request callback, this instance always refers to the *current* request
+#: (even on a multithreaded server).
+request = LocalRequest()
+
+#: A thread-safe instance of :class:`LocalResponse`. It is used to change the
+#: HTTP response for the *current* request.
+response = LocalResponse()
+
+#: A thread-safe namespace. Not used by Bottle.
+local = threading.local()
+
+# Initialize app stack (create first empty Bottle app)
+# BC: 0.6.4 and needed for run()
+app = default_app = AppStack()
+app.push()
+
+#: A virtual package that redirects import statements.
+#: Example: ``import bottle.ext.sqlite`` actually imports `bottle_sqlite`.
+ext = _ImportRedirect('bottle.ext' if __name__ == '__main__' else __name__+".ext", 'bottle_%s').module
+
+if __name__ == '__main__':
+    opt, args, parser = _cmd_options, _cmd_args, _cmd_parser
+    if opt.version:
+        _stdout('Bottle %s\n'%__version__)
+        sys.exit(0)
+    if not args:
+        parser.print_help()
+        _stderr('\nError: No application specified.\n')
+        sys.exit(1)
+
+    sys.path.insert(0, '.')
+    sys.modules.setdefault('bottle', sys.modules['__main__'])
+
+    host, port = (opt.bind or 'localhost'), 8080
+    if ':' in host:
+        host, port = host.rsplit(':', 1)
+
+    run(args[0], host=host, port=port, server=opt.server,
+        reloader=opt.reload, plugins=opt.plugin, debug=opt.debug)
+
+
+
+
+# THE END
diff --git a/pyload/lib/forwarder.py b/pyload/lib/forwarder.py
new file mode 100644
index 000000000..eacb33c2b
--- /dev/null
+++ b/pyload/lib/forwarder.py
@@ -0,0 +1,73 @@
+# -*- coding: utf-8 -*-
+
+"""
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 3 of the License,
+    or (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+    See the GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, see <http://www.gnu.org/licenses/>.
+
+    @author: RaNaN
+"""
+
+from sys import argv
+from sys import exit
+
+import socket
+import thread
+
+from traceback import print_exc
+
+class Forwarder():
+
+    def __init__(self, extip, extport=9666):
+        print "Start port forwarding to %s:%s" % (extip, extport)
+        proxy(extip, extport, 9666)
+
+
+def proxy(*settings):
+    while True:
+        server(*settings)
+
+def server(*settings):
+    try:
+        dock_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        dock_socket.bind(("127.0.0.1", settings[2]))
+        dock_socket.listen(5)
+        while True:
+            client_socket = dock_socket.accept()[0]
+            server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+            server_socket.connect((settings[0], settings[1]))
+            thread.start_new_thread(forward, (client_socket, server_socket))
+            thread.start_new_thread(forward, (server_socket, client_socket))
+    except Exception:
+        print_exc()
+
+
+def forward(source, destination):
+    string = ' '
+    while string:
+        string = source.recv(1024)
+        if string:
+            destination.sendall(string)
+        else:
+            #source.shutdown(socket.SHUT_RD)
+            destination.shutdown(socket.SHUT_WR)
+
+if __name__ == "__main__":
+    args = argv[1:]
+    if not args:
+        print "Usage: forwarder.py <ip> <port>"
+        exit()
+    if len(args) == 1:
+        args.append(9666)
+
+    f = Forwarder(args[0], int(args[1]))
+    
\ No newline at end of file
diff --git a/pyload/lib/hg_tool.py b/pyload/lib/hg_tool.py
new file mode 100644
index 000000000..cd97833df
--- /dev/null
+++ b/pyload/lib/hg_tool.py
@@ -0,0 +1,133 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import re
+from subprocess import Popen, PIPE
+from time import time, gmtime, strftime
+
+aliases = {"zoidber": "zoidberg", "zoidberg10": "zoidberg", "webmaster": "dhmh", "mast3rranan": "ranan",
+           "ranan2": "ranan"}
+exclude = ["locale/*", "module/lib/*"]
+date_format = "%Y-%m-%d"
+line_re = re.compile(r" (\d+) \**", re.I)
+
+def add_exclude_flags(args):
+    for dir in exclude:
+        args.extend(["-X", dir])
+
+# remove small percentages
+def wipe(data, perc=1):
+    s = (sum(data.values()) * perc) / 100
+    for k, v in data.items():
+        if v < s: del data[k]
+
+    return data
+
+# remove aliases
+def de_alias(data):
+    for k, v in aliases.iteritems():
+        if k not in data: continue
+        alias = aliases[k]
+
+        if alias in data: data[alias] += data[k]
+        else: data[alias] = data[k]
+
+        del data[k]
+
+    return data
+
+
+def output(data):
+    s = float(sum(data.values()))
+    print "Total Lines: %d" % s
+    for k, v in data.iteritems():
+        print "%15s: %.1f%% | %d" % (k, (v * 100) / s, v)
+    print
+
+
+def file_list():
+    args = ["hg", "status", "-A"]
+    add_exclude_flags(args)
+    p = Popen(args, stdout=PIPE)
+    out, err = p.communicate()
+    return [x.split()[1] for x in out.splitlines() if x.split()[0] in "CMA"]
+
+
+def hg_annotate(path):
+    args = ["hg", "annotate", "-u", path]
+    p = Popen(args, stdout=PIPE)
+    out, err = p.communicate()
+
+    data = {}
+
+    for line in out.splitlines():
+        author, non, line = line.partition(":")
+
+        # probably binary file
+        if author == path: return {}
+
+        author = author.strip().lower()
+        if not line.strip(): continue # don't count blank lines
+
+        if author in data: data[author] += 1
+        else: data[author] = 1
+
+    return de_alias(data)
+
+
+def hg_churn(days=None):
+    args = ["hg", "churn"]
+    if days:
+        args.append("-d")
+        t = time() - 60 * 60 * 24 * days
+        args.append("%s to %s" % (strftime(date_format, gmtime(t)), strftime(date_format)))
+
+    add_exclude_flags(args)
+    p = Popen(args, stdout=PIPE)
+    out, err = p.communicate()
+
+    data = {}
+
+    for line in out.splitlines():
+        m = line_re.search(line)
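+        # "hg churn" lines look roughly like "author  123 ******" (an
+        # assumption about the local hg version); the regex grabs the count.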
author = line.split()[0] + lines = int(m.group(1)) + + if "@" in author: + author, n, email = author.partition("@") + + author = author.strip().lower() + + if author in data: data[author] += lines + else: data[author] = lines + + return de_alias(data) + + +def complete_annotate(): + files = file_list() + data = {} + for f in files: + tmp = hg_annotate(f) + for k, v in tmp.iteritems(): + if k in data: data[k] += v + else: data[k] = v + + return data + + +if __name__ == "__main__": + for d in (30, 90, 180): + c = wipe(hg_churn(d)) + print "Changes in %d days:" % d + output(c) + + c = wipe(hg_churn()) + print "Total changes:" + output(c) + + print "Current source code version:" + data = wipe(complete_annotate()) + output(data) + + diff --git a/pyload/lib/mod_pywebsocket/COPYING b/pyload/lib/mod_pywebsocket/COPYING new file mode 100644 index 000000000..989d02e4c --- /dev/null +++ b/pyload/lib/mod_pywebsocket/COPYING @@ -0,0 +1,28 @@ +Copyright 2012, Google Inc. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/pyload/lib/mod_pywebsocket/__init__.py b/pyload/lib/mod_pywebsocket/__init__.py new file mode 100644 index 000000000..454ae0c45 --- /dev/null +++ b/pyload/lib/mod_pywebsocket/__init__.py @@ -0,0 +1,197 @@ +# Copyright 2011, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+"""WebSocket extension for Apache HTTP Server.
+
+mod_pywebsocket is a WebSocket extension for Apache HTTP Server
+intended for testing or experimental purposes. mod_python is required.
+
+
+Installation
+============
+
+0. Prepare an Apache HTTP Server for which mod_python is enabled.
+
+1. Specify the following Apache HTTP Server directives to suit your
+   configuration.
+
+   If mod_pywebsocket is not in the Python path, specify the following.
+   <websock_lib> is the directory where mod_pywebsocket is installed.
+
+       PythonPath "sys.path+['<websock_lib>']"
+
+   Always specify the following. <websock_handlers> is the directory where
+   user-written WebSocket handlers are placed.
+
+       PythonOption mod_pywebsocket.handler_root <websock_handlers>
+       PythonHeaderParserHandler mod_pywebsocket.headerparserhandler
+
+   To limit the search for WebSocket handlers to a directory <scan_dir>
+   under <websock_handlers>, configure as follows:
+
+       PythonOption mod_pywebsocket.handler_scan <scan_dir>
+
+   <scan_dir> is useful in saving scan time when <websock_handlers>
+   contains many non-WebSocket handler files.
+
+   If you want to allow handlers whose canonical path is not under the root
+   directory (i.e. symbolic link is in root directory but its target is not),
+   configure as follows:
+
+       PythonOption mod_pywebsocket.allow_handlers_outside_root_dir On
+
+   Example snippet of httpd.conf:
+   (mod_pywebsocket is in /websock_lib, WebSocket handlers are in
+   /websock_handlers, port is 80 for ws, 443 for wss.)
+
+       <IfModule python_module>
+         PythonPath "sys.path+['/websock_lib']"
+         PythonOption mod_pywebsocket.handler_root /websock_handlers
+         PythonHeaderParserHandler mod_pywebsocket.headerparserhandler
+       </IfModule>
+
+2. Tune Apache parameters for serving WebSocket. We'd like to note that at
+   least the TimeOut directive from core features and the RequestReadTimeout
+   directive from mod_reqtimeout should be modified so that they do not kill
+   connections after only a few seconds of idle time.
+
+3. Verify installation. You can use example/console.html to poke the server.
+
+
+Writing WebSocket handlers
+==========================
+
+When a WebSocket request comes in, the resource name
+specified in the handshake is considered as if it is a file path under
+<websock_handlers> and the handler defined in
+<websock_handlers>/<resource_name>_wsh.py is invoked.
+
+For example, if the resource name is /example/chat, the handler defined in
+<websock_handlers>/example/chat_wsh.py is invoked.
+
+A WebSocket handler is composed of the following three functions:
+
+    web_socket_do_extra_handshake(request)
+    web_socket_transfer_data(request)
+    web_socket_passive_closing_handshake(request)
+
+where:
+    request: mod_python request.
+
+web_socket_do_extra_handshake is called during the handshake after the
+headers are successfully parsed and WebSocket properties (ws_location,
+ws_origin, and ws_resource) are added to request. A handler
+can reject the request by raising an exception.
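+
+For example, a minimal echo handler could look like the following sketch
+(illustrative only; the file path and handler bodies are hypothetical, not
+part of the distribution):
+
+    # <websock_handlers>/example/echo_wsh.py
+
+    def web_socket_do_extra_handshake(request):
+        # Accept the request as-is; raise an exception to reject it.
+        # For HyBi 06 and later you would also pick one entry from
+        # request.ws_requested_protocols and assign it to
+        # request.ws_protocol (see Subprotocol Negotiation below).
+        pass
+
+    def web_socket_transfer_data(request):
+        while True:
+            message = request.ws_stream.receive_message()
+            if message is None:
+                return  # the client started the closing handshake
+            request.ws_stream.send_message(message)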
+
+A request object has the following properties that you can use during the
+extra handshake (web_socket_do_extra_handshake):
+- ws_resource
+- ws_origin
+- ws_version
+- ws_location (HyBi 00 only)
+- ws_extensions (HyBi 06 and later)
+- ws_deflate (HyBi 06 and later)
+- ws_protocol
+- ws_requested_protocols (HyBi 06 and later)
+
+The last two are a bit tricky. See the next subsection.
+
+
+Subprotocol Negotiation
+-----------------------
+
+For HyBi 06 and later, ws_protocol is always set to None when
+web_socket_do_extra_handshake is called. If ws_requested_protocols is not
+None, you must choose one subprotocol from this list and set it to
+ws_protocol.
+
+For HyBi 00, when web_socket_do_extra_handshake is called, ws_protocol is
+set to the value given by the client in the Sec-WebSocket-Protocol header,
+or to None if no such header was found in the opening handshake request.
+Finish the extra handshake with ws_protocol untouched to accept the
+requested subprotocol. The Sec-WebSocket-Protocol header will then be sent
+to the client in the response with the same value as requested. Raise an
+exception in web_socket_do_extra_handshake to reject the requested
+subprotocol.
+
+
+Data Transfer
+-------------
+
+web_socket_transfer_data is called after the handshake has completed
+successfully. A handler can receive/send messages from/to the client
+using request. The mod_pywebsocket.msgutil module provides utilities
+for data transfer.
+
+You can receive a message with the following statement.
+
+    message = request.ws_stream.receive_message()
+
+This call blocks until a complete text frame arrives, and the payload data
+of the incoming frame is stored into message. When you're using the IETF
+HyBi 00 or later protocol, receive_message() returns None on receiving a
+client-initiated closing handshake. When any error occurs,
+receive_message() raises an exception.
+
+You can send a message with the following statement.
+
+    request.ws_stream.send_message(message)
+
+
+Closing Connection
+------------------
+
+Executing the following statement, or simply returning from
+web_socket_transfer_data, closes the connection.
+
+    request.ws_stream.close_connection()
+
+close_connection waits for a closing handshake acknowledgement from the
+client and raises an exception when it cannot receive a valid one.
+
+web_socket_passive_closing_handshake is called immediately after the server
+receives an incoming closing frame from the client peer. You can specify
+the code and reason via its return values; they are sent as an outgoing
+closing frame from the server. A request object has the following
+properties that you can use in web_socket_passive_closing_handshake.
+- ws_close_code
+- ws_close_reason
+
+
+Threading
+---------
+
+A WebSocket handler must be thread-safe if the server (Apache or
+standalone.py) is configured to use threads.
+"""
+
+
+# vi:sts=4 sw=4 et tw=72
diff --git a/pyload/lib/mod_pywebsocket/_stream_base.py b/pyload/lib/mod_pywebsocket/_stream_base.py
new file mode 100644
index 000000000..60fb33d2c
--- /dev/null
+++ b/pyload/lib/mod_pywebsocket/_stream_base.py
@@ -0,0 +1,165 @@
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +"""Base stream class. +""" + + +# Note: request.connection.write/read are used in this module, even though +# mod_python document says that they should be used only in connection +# handlers. Unfortunately, we have no other options. For example, +# request.write/read are not suitable because they don't allow direct raw bytes +# writing/reading. + + +from mod_pywebsocket import util + + +# Exceptions + + +class ConnectionTerminatedException(Exception): + """This exception will be raised when a connection is terminated + unexpectedly. + """ + + pass + + +class InvalidFrameException(ConnectionTerminatedException): + """This exception will be raised when we received an invalid frame we + cannot parse. + """ + + pass + + +class BadOperationException(Exception): + """This exception will be raised when send_message() is called on + server-terminated connection or receive_message() is called on + client-terminated connection. + """ + + pass + + +class UnsupportedFrameException(Exception): + """This exception will be raised when we receive a frame with flag, opcode + we cannot handle. Handlers can just catch and ignore this exception and + call receive_message() again to continue processing the next frame. + """ + + pass + + +class InvalidUTF8Exception(Exception): + """This exception will be raised when we receive a text frame which + contains invalid UTF-8 strings. + """ + + pass + + +class StreamBase(object): + """Base stream class.""" + + def __init__(self, request): + """Construct an instance. + + Args: + request: mod_python request. + """ + + self._logger = util.get_class_logger(self) + + self._request = request + + def _read(self, length): + """Reads length bytes from connection. In case we catch any exception, + prepends remote address to the exception message and raise again. + + Raises: + ConnectionTerminatedException: when read returns empty string. + """ + + bytes = self._request.connection.read(length) + if not bytes: + raise ConnectionTerminatedException( + 'Receiving %d byte failed. Peer (%r) closed connection' % + (length, (self._request.connection.remote_addr,))) + return bytes + + def _write(self, bytes): + """Writes given bytes to connection. In case we catch any exception, + prepends remote address to the exception message and raise again. 
+ """ + + try: + self._request.connection.write(bytes) + except Exception, e: + util.prepend_message_to_exception( + 'Failed to send message to %r: ' % + (self._request.connection.remote_addr,), + e) + raise + + def receive_bytes(self, length): + """Receives multiple bytes. Retries read when we couldn't receive the + specified amount. + + Raises: + ConnectionTerminatedException: when read returns empty string. + """ + + bytes = [] + while length > 0: + new_bytes = self._read(length) + bytes.append(new_bytes) + length -= len(new_bytes) + return ''.join(bytes) + + def _read_until(self, delim_char): + """Reads bytes until we encounter delim_char. The result will not + contain delim_char. + + Raises: + ConnectionTerminatedException: when read returns empty string. + """ + + bytes = [] + while True: + ch = self._read(1) + if ch == delim_char: + break + bytes.append(ch) + return ''.join(bytes) + + +# vi:sts=4 sw=4 et diff --git a/pyload/lib/mod_pywebsocket/_stream_hixie75.py b/pyload/lib/mod_pywebsocket/_stream_hixie75.py new file mode 100644 index 000000000..94cf5b31b --- /dev/null +++ b/pyload/lib/mod_pywebsocket/_stream_hixie75.py @@ -0,0 +1,229 @@ +# Copyright 2011, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +"""This file provides a class for parsing/building frames of the WebSocket +protocol version HyBi 00 and Hixie 75. + +Specification: +- HyBi 00 http://tools.ietf.org/html/draft-ietf-hybi-thewebsocketprotocol-00 +- Hixie 75 http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-75 +""" + + +from mod_pywebsocket import common +from mod_pywebsocket._stream_base import BadOperationException +from mod_pywebsocket._stream_base import ConnectionTerminatedException +from mod_pywebsocket._stream_base import InvalidFrameException +from mod_pywebsocket._stream_base import StreamBase +from mod_pywebsocket._stream_base import UnsupportedFrameException +from mod_pywebsocket import util + + +class StreamHixie75(StreamBase): + """A class for parsing/building frames of the WebSocket protocol version + HyBi 00 and Hixie 75. 
+ """ + + def __init__(self, request, enable_closing_handshake=False): + """Construct an instance. + + Args: + request: mod_python request. + enable_closing_handshake: to let StreamHixie75 perform closing + handshake as specified in HyBi 00, set + this option to True. + """ + + StreamBase.__init__(self, request) + + self._logger = util.get_class_logger(self) + + self._enable_closing_handshake = enable_closing_handshake + + self._request.client_terminated = False + self._request.server_terminated = False + + def send_message(self, message, end=True, binary=False): + """Send message. + + Args: + message: unicode string to send. + binary: not used in hixie75. + + Raises: + BadOperationException: when called on a server-terminated + connection. + """ + + if not end: + raise BadOperationException( + 'StreamHixie75 doesn\'t support send_message with end=False') + + if binary: + raise BadOperationException( + 'StreamHixie75 doesn\'t support send_message with binary=True') + + if self._request.server_terminated: + raise BadOperationException( + 'Requested send_message after sending out a closing handshake') + + self._write(''.join(['\x00', message.encode('utf-8'), '\xff'])) + + def _read_payload_length_hixie75(self): + """Reads a length header in a Hixie75 version frame with length. + + Raises: + ConnectionTerminatedException: when read returns empty string. + """ + + length = 0 + while True: + b_str = self._read(1) + b = ord(b_str) + length = length * 128 + (b & 0x7f) + if (b & 0x80) == 0: + break + return length + + def receive_message(self): + """Receive a WebSocket frame and return its payload an unicode string. + + Returns: + payload unicode string in a WebSocket frame. + + Raises: + ConnectionTerminatedException: when read returns empty + string. + BadOperationException: when called on a client-terminated + connection. + """ + + if self._request.client_terminated: + raise BadOperationException( + 'Requested receive_message after receiving a closing ' + 'handshake') + + while True: + # Read 1 byte. + # mp_conn.read will block if no bytes are available. + # Timeout is controlled by TimeOut directive of Apache. + frame_type_str = self.receive_bytes(1) + frame_type = ord(frame_type_str) + if (frame_type & 0x80) == 0x80: + # The payload length is specified in the frame. + # Read and discard. + length = self._read_payload_length_hixie75() + if length > 0: + _ = self.receive_bytes(length) + # 5.3 3. 12. if /type/ is 0xFF and /length/ is 0, then set the + # /client terminated/ flag and abort these steps. + if not self._enable_closing_handshake: + continue + + if frame_type == 0xFF and length == 0: + self._request.client_terminated = True + + if self._request.server_terminated: + self._logger.debug( + 'Received ack for server-initiated closing ' + 'handshake') + return None + + self._logger.debug( + 'Received client-initiated closing handshake') + + self._send_closing_handshake() + self._logger.debug( + 'Sent ack for client-initiated closing handshake') + return None + else: + # The payload is delimited with \xff. + bytes = self._read_until('\xff') + # The WebSocket protocol section 4.4 specifies that invalid + # characters must be replaced with U+fffd REPLACEMENT + # CHARACTER. + message = bytes.decode('utf-8', 'replace') + if frame_type == 0x00: + return message + # Discard data of other types. 
+ + def _send_closing_handshake(self): + if not self._enable_closing_handshake: + raise BadOperationException( + 'Closing handshake is not supported in Hixie 75 protocol') + + self._request.server_terminated = True + + # 5.3 the server may decide to terminate the WebSocket connection by + # running through the following steps: + # 1. send a 0xFF byte and a 0x00 byte to the client to indicate the + # start of the closing handshake. + self._write('\xff\x00') + + def close_connection(self, unused_code='', unused_reason=''): + """Closes a WebSocket connection. + + Raises: + ConnectionTerminatedException: when closing handshake was + not successfull. + """ + + if self._request.server_terminated: + self._logger.debug( + 'Requested close_connection but server is already terminated') + return + + if not self._enable_closing_handshake: + self._request.server_terminated = True + self._logger.debug('Connection closed') + return + + self._send_closing_handshake() + self._logger.debug('Sent server-initiated closing handshake') + + # TODO(ukai): 2. wait until the /client terminated/ flag has been set, + # or until a server-defined timeout expires. + # + # For now, we expect receiving closing handshake right after sending + # out closing handshake, and if we couldn't receive non-handshake + # frame, we take it as ConnectionTerminatedException. + message = self.receive_message() + if message is not None: + raise ConnectionTerminatedException( + 'Didn\'t receive valid ack for closing handshake') + # TODO: 3. close the WebSocket connection. + # note: mod_python Connection (mp_conn) doesn't have close method. + + def send_ping(self, body): + raise BadOperationException( + 'StreamHixie75 doesn\'t support send_ping') + + +# vi:sts=4 sw=4 et diff --git a/pyload/lib/mod_pywebsocket/_stream_hybi.py b/pyload/lib/mod_pywebsocket/_stream_hybi.py new file mode 100644 index 000000000..bd158fa6b --- /dev/null +++ b/pyload/lib/mod_pywebsocket/_stream_hybi.py @@ -0,0 +1,915 @@ +# Copyright 2012, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + +"""This file provides classes and helper functions for parsing/building frames +of the WebSocket protocol (RFC 6455). + +Specification: +http://tools.ietf.org/html/rfc6455 +""" + + +from collections import deque +import logging +import os +import struct +import time + +from mod_pywebsocket import common +from mod_pywebsocket import util +from mod_pywebsocket._stream_base import BadOperationException +from mod_pywebsocket._stream_base import ConnectionTerminatedException +from mod_pywebsocket._stream_base import InvalidFrameException +from mod_pywebsocket._stream_base import InvalidUTF8Exception +from mod_pywebsocket._stream_base import StreamBase +from mod_pywebsocket._stream_base import UnsupportedFrameException + + +_NOOP_MASKER = util.NoopMasker() + + +class Frame(object): + + def __init__(self, fin=1, rsv1=0, rsv2=0, rsv3=0, + opcode=None, payload=''): + self.fin = fin + self.rsv1 = rsv1 + self.rsv2 = rsv2 + self.rsv3 = rsv3 + self.opcode = opcode + self.payload = payload + + +# Helper functions made public to be used for writing unittests for WebSocket +# clients. + + +def create_length_header(length, mask): + """Creates a length header. + + Args: + length: Frame length. Must be less than 2^63. + mask: Mask bit. Must be boolean. + + Raises: + ValueError: when bad data is given. + """ + + if mask: + mask_bit = 1 << 7 + else: + mask_bit = 0 + + if length < 0: + raise ValueError('length must be non negative integer') + elif length <= 125: + return chr(mask_bit | length) + elif length < (1 << 16): + return chr(mask_bit | 126) + struct.pack('!H', length) + elif length < (1 << 63): + return chr(mask_bit | 127) + struct.pack('!Q', length) + else: + raise ValueError('Payload is too big for one frame') + + +def create_header(opcode, payload_length, fin, rsv1, rsv2, rsv3, mask): + """Creates a frame header. + + Raises: + Exception: when bad data is given. 
+ """ + + if opcode < 0 or 0xf < opcode: + raise ValueError('Opcode out of range') + + if payload_length < 0 or (1 << 63) <= payload_length: + raise ValueError('payload_length out of range') + + if (fin | rsv1 | rsv2 | rsv3) & ~1: + raise ValueError('FIN bit and Reserved bit parameter must be 0 or 1') + + header = '' + + first_byte = ((fin << 7) + | (rsv1 << 6) | (rsv2 << 5) | (rsv3 << 4) + | opcode) + header += chr(first_byte) + header += create_length_header(payload_length, mask) + + return header + + +def _build_frame(header, body, mask): + if not mask: + return header + body + + masking_nonce = os.urandom(4) + masker = util.RepeatedXorMasker(masking_nonce) + + return header + masking_nonce + masker.mask(body) + + +def _filter_and_format_frame_object(frame, mask, frame_filters): + for frame_filter in frame_filters: + frame_filter.filter(frame) + + header = create_header( + frame.opcode, len(frame.payload), frame.fin, + frame.rsv1, frame.rsv2, frame.rsv3, mask) + return _build_frame(header, frame.payload, mask) + + +def create_binary_frame( + message, opcode=common.OPCODE_BINARY, fin=1, mask=False, frame_filters=[]): + """Creates a simple binary frame with no extension, reserved bit.""" + + frame = Frame(fin=fin, opcode=opcode, payload=message) + return _filter_and_format_frame_object(frame, mask, frame_filters) + + +def create_text_frame( + message, opcode=common.OPCODE_TEXT, fin=1, mask=False, frame_filters=[]): + """Creates a simple text frame with no extension, reserved bit.""" + + encoded_message = message.encode('utf-8') + return create_binary_frame(encoded_message, opcode, fin, mask, + frame_filters) + + +def parse_frame(receive_bytes, logger=None, + ws_version=common.VERSION_HYBI_LATEST, + unmask_receive=True): + """Parses a frame. Returns a tuple containing each header field and + payload. + + Args: + receive_bytes: a function that reads frame data from a stream or + something similar. The function takes length of the bytes to be + read. The function must raise ConnectionTerminatedException if + there is not enough data to be read. + logger: a logging object. + ws_version: the version of WebSocket protocol. + unmask_receive: unmask received frames. When received unmasked + frame, raises InvalidFrameException. + + Raises: + ConnectionTerminatedException: when receive_bytes raises it. + InvalidFrameException: when the frame contains invalid data. + """ + + if not logger: + logger = logging.getLogger() + + logger.log(common.LOGLEVEL_FINE, 'Receive the first 2 octets of a frame') + + received = receive_bytes(2) + + first_byte = ord(received[0]) + fin = (first_byte >> 7) & 1 + rsv1 = (first_byte >> 6) & 1 + rsv2 = (first_byte >> 5) & 1 + rsv3 = (first_byte >> 4) & 1 + opcode = first_byte & 0xf + + second_byte = ord(received[1]) + mask = (second_byte >> 7) & 1 + payload_length = second_byte & 0x7f + + logger.log(common.LOGLEVEL_FINE, + 'FIN=%s, RSV1=%s, RSV2=%s, RSV3=%s, opcode=%s, ' + 'Mask=%s, Payload_length=%s', + fin, rsv1, rsv2, rsv3, opcode, mask, payload_length) + + if (mask == 1) != unmask_receive: + raise InvalidFrameException( + 'Mask bit on the received frame did\'nt match masking ' + 'configuration for received frames') + + # The HyBi and later specs disallow putting a value in 0x0-0xFFFF + # into the 8-octet extended payload length field (or 0x0-0xFD in + # 2-octet field). 
+ valid_length_encoding = True + length_encoding_bytes = 1 + if payload_length == 127: + logger.log(common.LOGLEVEL_FINE, + 'Receive 8-octet extended payload length') + + extended_payload_length = receive_bytes(8) + payload_length = struct.unpack( + '!Q', extended_payload_length)[0] + if payload_length > 0x7FFFFFFFFFFFFFFF: + raise InvalidFrameException( + 'Extended payload length >= 2^63') + if ws_version >= 13 and payload_length < 0x10000: + valid_length_encoding = False + length_encoding_bytes = 8 + + logger.log(common.LOGLEVEL_FINE, + 'Decoded_payload_length=%s', payload_length) + elif payload_length == 126: + logger.log(common.LOGLEVEL_FINE, + 'Receive 2-octet extended payload length') + + extended_payload_length = receive_bytes(2) + payload_length = struct.unpack( + '!H', extended_payload_length)[0] + if ws_version >= 13 and payload_length < 126: + valid_length_encoding = False + length_encoding_bytes = 2 + + logger.log(common.LOGLEVEL_FINE, + 'Decoded_payload_length=%s', payload_length) + + if not valid_length_encoding: + logger.warning( + 'Payload length is not encoded using the minimal number of ' + 'bytes (%d is encoded using %d bytes)', + payload_length, + length_encoding_bytes) + + if mask == 1: + logger.log(common.LOGLEVEL_FINE, 'Receive mask') + + masking_nonce = receive_bytes(4) + masker = util.RepeatedXorMasker(masking_nonce) + + logger.log(common.LOGLEVEL_FINE, 'Mask=%r', masking_nonce) + else: + masker = _NOOP_MASKER + + logger.log(common.LOGLEVEL_FINE, 'Receive payload data') + if logger.isEnabledFor(common.LOGLEVEL_FINE): + receive_start = time.time() + + raw_payload_bytes = receive_bytes(payload_length) + + if logger.isEnabledFor(common.LOGLEVEL_FINE): + logger.log( + common.LOGLEVEL_FINE, + 'Done receiving payload data at %s MB/s', + payload_length / (time.time() - receive_start) / 1000 / 1000) + logger.log(common.LOGLEVEL_FINE, 'Unmask payload data') + + if logger.isEnabledFor(common.LOGLEVEL_FINE): + unmask_start = time.time() + + bytes = masker.mask(raw_payload_bytes) + + if logger.isEnabledFor(common.LOGLEVEL_FINE): + logger.log( + common.LOGLEVEL_FINE, + 'Done unmasking payload data at %s MB/s', + payload_length / (time.time() - unmask_start) / 1000 / 1000) + + return opcode, bytes, fin, rsv1, rsv2, rsv3 + + +class FragmentedFrameBuilder(object): + """A stateful class to send a message as fragments.""" + + def __init__(self, mask, frame_filters=[], encode_utf8=True): + """Constructs an instance.""" + + self._mask = mask + self._frame_filters = frame_filters + # This is for skipping UTF-8 encoding when building text type frames + # from compressed data. + self._encode_utf8 = encode_utf8 + + self._started = False + + # Hold opcode of the first frame in messages to verify types of other + # frames in the message are all the same. 
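+        # (e.g. a text message sent in three parts goes out as
+        # TEXT fin=0, CONTINUATION fin=0, CONTINUATION fin=1)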
+ self._opcode = common.OPCODE_TEXT + + def build(self, payload_data, end, binary): + if binary: + frame_type = common.OPCODE_BINARY + else: + frame_type = common.OPCODE_TEXT + if self._started: + if self._opcode != frame_type: + raise ValueError('Message types are different in frames for ' + 'the same message') + opcode = common.OPCODE_CONTINUATION + else: + opcode = frame_type + self._opcode = frame_type + + if end: + self._started = False + fin = 1 + else: + self._started = True + fin = 0 + + if binary or not self._encode_utf8: + return create_binary_frame( + payload_data, opcode, fin, self._mask, self._frame_filters) + else: + return create_text_frame( + payload_data, opcode, fin, self._mask, self._frame_filters) + + +def _create_control_frame(opcode, body, mask, frame_filters): + frame = Frame(opcode=opcode, payload=body) + + for frame_filter in frame_filters: + frame_filter.filter(frame) + + if len(frame.payload) > 125: + raise BadOperationException( + 'Payload data size of control frames must be 125 bytes or less') + + header = create_header( + frame.opcode, len(frame.payload), frame.fin, + frame.rsv1, frame.rsv2, frame.rsv3, mask) + return _build_frame(header, frame.payload, mask) + + +def create_ping_frame(body, mask=False, frame_filters=[]): + return _create_control_frame(common.OPCODE_PING, body, mask, frame_filters) + + +def create_pong_frame(body, mask=False, frame_filters=[]): + return _create_control_frame(common.OPCODE_PONG, body, mask, frame_filters) + + +def create_close_frame(body, mask=False, frame_filters=[]): + return _create_control_frame( + common.OPCODE_CLOSE, body, mask, frame_filters) + + +def create_closing_handshake_body(code, reason): + body = '' + if code is not None: + if (code > common.STATUS_USER_PRIVATE_MAX or + code < common.STATUS_NORMAL_CLOSURE): + raise BadOperationException('Status code is out of range') + if (code == common.STATUS_NO_STATUS_RECEIVED or + code == common.STATUS_ABNORMAL_CLOSURE or + code == common.STATUS_TLS_HANDSHAKE): + raise BadOperationException('Status code is reserved pseudo ' + 'code') + encoded_reason = reason.encode('utf-8') + body = struct.pack('!H', code) + encoded_reason + return body + + +class StreamOptions(object): + """Holds option values to configure Stream objects.""" + + def __init__(self): + """Constructs StreamOptions.""" + + # Enables deflate-stream extension. + self.deflate_stream = False + + # Filters applied to frames. + self.outgoing_frame_filters = [] + self.incoming_frame_filters = [] + + # Filters applied to messages. Control frames are not affected by them. + self.outgoing_message_filters = [] + self.incoming_message_filters = [] + + self.encode_text_message_to_utf8 = True + self.mask_send = False + self.unmask_receive = True + # RFC6455 disallows fragmented control frames, but mux extension + # relaxes the restriction. + self.allow_fragmented_control_frame = False + + +class Stream(StreamBase): + """A class for parsing/building frames of the WebSocket protocol + (RFC 6455). + """ + + def __init__(self, request, options): + """Constructs an instance. + + Args: + request: mod_python request. + """ + + StreamBase.__init__(self, request) + + self._logger = util.get_class_logger(self) + + self._options = options + + if self._options.deflate_stream: + self._logger.debug('Setup filter for deflate-stream') + self._request = util.DeflateRequest(self._request) + + self._request.client_terminated = False + self._request.server_terminated = False + + # Holds body of received fragments. 
+ self._received_fragments = [] + # Holds the opcode of the first fragment. + self._original_opcode = None + + self._writer = FragmentedFrameBuilder( + self._options.mask_send, self._options.outgoing_frame_filters, + self._options.encode_text_message_to_utf8) + + self._ping_queue = deque() + + def _receive_frame(self): + """Receives a frame and return data in the frame as a tuple containing + each header field and payload separately. + + Raises: + ConnectionTerminatedException: when read returns empty + string. + InvalidFrameException: when the frame contains invalid data. + """ + + def _receive_bytes(length): + return self.receive_bytes(length) + + return parse_frame(receive_bytes=_receive_bytes, + logger=self._logger, + ws_version=self._request.ws_version, + unmask_receive=self._options.unmask_receive) + + def _receive_frame_as_frame_object(self): + opcode, bytes, fin, rsv1, rsv2, rsv3 = self._receive_frame() + + return Frame(fin=fin, rsv1=rsv1, rsv2=rsv2, rsv3=rsv3, + opcode=opcode, payload=bytes) + + def receive_filtered_frame(self): + """Receives a frame and applies frame filters and message filters. + The frame to be received must satisfy following conditions: + - The frame is not fragmented. + - The opcode of the frame is TEXT or BINARY. + + DO NOT USE this method except for testing purpose. + """ + + frame = self._receive_frame_as_frame_object() + if not frame.fin: + raise InvalidFrameException( + 'Segmented frames must not be received via ' + 'receive_filtered_frame()') + if (frame.opcode != common.OPCODE_TEXT and + frame.opcode != common.OPCODE_BINARY): + raise InvalidFrameException( + 'Control frames must not be received via ' + 'receive_filtered_frame()') + + for frame_filter in self._options.incoming_frame_filters: + frame_filter.filter(frame) + for message_filter in self._options.incoming_message_filters: + frame.payload = message_filter.filter(frame.payload) + return frame + + def send_message(self, message, end=True, binary=False): + """Send message. + + Args: + message: text in unicode or binary in str to send. + binary: send message as binary frame. + + Raises: + BadOperationException: when called on a server-terminated + connection or called with inconsistent message type or + binary parameter. + """ + + if self._request.server_terminated: + raise BadOperationException( + 'Requested send_message after sending out a closing handshake') + + if binary and isinstance(message, unicode): + raise BadOperationException( + 'Message for binary frame must be instance of str') + + for message_filter in self._options.outgoing_message_filters: + message = message_filter.filter(message, end, binary) + + try: + # Set this to any positive integer to limit maximum size of data in + # payload data of each frame. + MAX_PAYLOAD_DATA_SIZE = -1 + + if MAX_PAYLOAD_DATA_SIZE <= 0: + self._write(self._writer.build(message, end, binary)) + return + + bytes_written = 0 + while True: + end_for_this_frame = end + bytes_to_write = len(message) - bytes_written + if (MAX_PAYLOAD_DATA_SIZE > 0 and + bytes_to_write > MAX_PAYLOAD_DATA_SIZE): + end_for_this_frame = False + bytes_to_write = MAX_PAYLOAD_DATA_SIZE + + frame = self._writer.build( + message[bytes_written:bytes_written + bytes_to_write], + end_for_this_frame, + binary) + self._write(frame) + + bytes_written += bytes_to_write + + # This if must be placed here (the end of while block) so that + # at least one frame is sent. 
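+                # (an empty message would otherwise produce no frame at
+                # all instead of a single empty final frame)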
+ if len(message) <= bytes_written: + break + except ValueError, e: + raise BadOperationException(e) + + def _get_message_from_frame(self, frame): + """Gets a message from frame. If the message is composed of fragmented + frames and the frame is not the last fragmented frame, this method + returns None. The whole message will be returned when the last + fragmented frame is passed to this method. + + Raises: + InvalidFrameException: when the frame doesn't match defragmentation + context, or the frame contains invalid data. + """ + + if frame.opcode == common.OPCODE_CONTINUATION: + if not self._received_fragments: + if frame.fin: + raise InvalidFrameException( + 'Received a termination frame but fragmentation ' + 'not started') + else: + raise InvalidFrameException( + 'Received an intermediate frame but ' + 'fragmentation not started') + + if frame.fin: + # End of fragmentation frame + self._received_fragments.append(frame.payload) + message = ''.join(self._received_fragments) + self._received_fragments = [] + return message + else: + # Intermediate frame + self._received_fragments.append(frame.payload) + return None + else: + if self._received_fragments: + if frame.fin: + raise InvalidFrameException( + 'Received an unfragmented frame without ' + 'terminating existing fragmentation') + else: + raise InvalidFrameException( + 'New fragmentation started without terminating ' + 'existing fragmentation') + + if frame.fin: + # Unfragmented frame + + self._original_opcode = frame.opcode + return frame.payload + else: + # Start of fragmentation frame + + if (not self._options.allow_fragmented_control_frame and + common.is_control_opcode(frame.opcode)): + raise InvalidFrameException( + 'Control frames must not be fragmented') + + self._original_opcode = frame.opcode + self._received_fragments.append(frame.payload) + return None + + def _process_close_message(self, message): + """Processes close message. + + Args: + message: close message. + + Raises: + InvalidFrameException: when the message is invalid. + """ + + self._request.client_terminated = True + + # Status code is optional. We can have status reason only if we + # have status code. Status reason can be empty string. So, + # allowed cases are + # - no application data: no code no reason + # - 2 octet of application data: has code but no reason + # - 3 or more octet of application data: both code and reason + if len(message) == 0: + self._logger.debug('Received close frame (empty body)') + self._request.ws_close_code = ( + common.STATUS_NO_STATUS_RECEIVED) + elif len(message) == 1: + raise InvalidFrameException( + 'If a close frame has status code, the length of ' + 'status code must be 2 octet') + elif len(message) >= 2: + self._request.ws_close_code = struct.unpack( + '!H', message[0:2])[0] + self._request.ws_close_reason = message[2:].decode( + 'utf-8', 'replace') + self._logger.debug( + 'Received close frame (code=%d, reason=%r)', + self._request.ws_close_code, + self._request.ws_close_reason) + + # Drain junk data after the close frame if necessary. 
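+        # (this only has an effect when deflate-stream is enabled and
+        # pywebsocket runs standalone; see _drain_received_data below)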
+ self._drain_received_data() + + if self._request.server_terminated: + self._logger.debug( + 'Received ack for server-initiated closing handshake') + return + + self._logger.debug( + 'Received client-initiated closing handshake') + + code = common.STATUS_NORMAL_CLOSURE + reason = '' + if hasattr(self._request, '_dispatcher'): + dispatcher = self._request._dispatcher + code, reason = dispatcher.passive_closing_handshake( + self._request) + if code is None and reason is not None and len(reason) > 0: + self._logger.warning( + 'Handler specified reason despite code being None') + reason = '' + if reason is None: + reason = '' + self._send_closing_handshake(code, reason) + self._logger.debug( + 'Sent ack for client-initiated closing handshake ' + '(code=%r, reason=%r)', code, reason) + + def _process_ping_message(self, message): + """Processes ping message. + + Args: + message: ping message. + """ + + try: + handler = self._request.on_ping_handler + if handler: + handler(self._request, message) + return + except AttributeError, e: + pass + self._send_pong(message) + + def _process_pong_message(self, message): + """Processes pong message. + + Args: + message: pong message. + """ + + # TODO(tyoshino): Add ping timeout handling. + + inflight_pings = deque() + + while True: + try: + expected_body = self._ping_queue.popleft() + if expected_body == message: + # inflight_pings contains pings ignored by the + # other peer. Just forget them. + self._logger.debug( + 'Ping %r is acked (%d pings were ignored)', + expected_body, len(inflight_pings)) + break + else: + inflight_pings.append(expected_body) + except IndexError, e: + # The received pong was unsolicited pong. Keep the + # ping queue as is. + self._ping_queue = inflight_pings + self._logger.debug('Received a unsolicited pong') + break + + try: + handler = self._request.on_pong_handler + if handler: + handler(self._request, message) + except AttributeError, e: + pass + + def receive_message(self): + """Receive a WebSocket frame and return its payload as a text in + unicode or a binary in str. + + Returns: + payload data of the frame + - as unicode instance if received text frame + - as str instance if received binary frame + or None iff received closing handshake. + Raises: + BadOperationException: when called on a client-terminated + connection. + ConnectionTerminatedException: when read returns empty + string. + InvalidFrameException: when the frame contains invalid + data. + UnsupportedFrameException: when the received frame has + flags, opcode we cannot handle. You can ignore this + exception and continue receiving the next frame. + """ + + if self._request.client_terminated: + raise BadOperationException( + 'Requested receive_message after receiving a closing ' + 'handshake') + + while True: + # mp_conn.read will block if no bytes are available. + # Timeout is controlled by TimeOut directive of Apache. + + frame = self._receive_frame_as_frame_object() + + # Check the constraint on the payload size for control frames + # before extension processes the frame. 
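+            # (control frames must also not be fragmented; that is
+            # enforced in _get_message_from_frame)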
+ # See also http://tools.ietf.org/html/rfc6455#section-5.5 + if (common.is_control_opcode(frame.opcode) and + len(frame.payload) > 125): + raise InvalidFrameException( + 'Payload data size of control frames must be 125 bytes or ' + 'less') + + for frame_filter in self._options.incoming_frame_filters: + frame_filter.filter(frame) + + if frame.rsv1 or frame.rsv2 or frame.rsv3: + raise UnsupportedFrameException( + 'Unsupported flag is set (rsv = %d%d%d)' % + (frame.rsv1, frame.rsv2, frame.rsv3)) + + message = self._get_message_from_frame(frame) + if message is None: + continue + + for message_filter in self._options.incoming_message_filters: + message = message_filter.filter(message) + + if self._original_opcode == common.OPCODE_TEXT: + # The WebSocket protocol section 4.4 specifies that invalid + # characters must be replaced with U+fffd REPLACEMENT + # CHARACTER. + try: + return message.decode('utf-8') + except UnicodeDecodeError, e: + raise InvalidUTF8Exception(e) + elif self._original_opcode == common.OPCODE_BINARY: + return message + elif self._original_opcode == common.OPCODE_CLOSE: + self._process_close_message(message) + return None + elif self._original_opcode == common.OPCODE_PING: + self._process_ping_message(message) + elif self._original_opcode == common.OPCODE_PONG: + self._process_pong_message(message) + else: + raise UnsupportedFrameException( + 'Opcode %d is not supported' % self._original_opcode) + + def _send_closing_handshake(self, code, reason): + body = create_closing_handshake_body(code, reason) + frame = create_close_frame( + body, mask=self._options.mask_send, + frame_filters=self._options.outgoing_frame_filters) + + self._request.server_terminated = True + + self._write(frame) + + def close_connection(self, code=common.STATUS_NORMAL_CLOSURE, reason=''): + """Closes a WebSocket connection. + + Args: + code: Status code for close frame. If code is None, a close + frame with empty body will be sent. + reason: string representing close reason. + Raises: + BadOperationException: when reason is specified with code None + or reason is not an instance of both str and unicode. + """ + + if self._request.server_terminated: + self._logger.debug( + 'Requested close_connection but server is already terminated') + return + + if code is None: + if reason is not None and len(reason) > 0: + raise BadOperationException( + 'close reason must not be specified if code is None') + reason = '' + else: + if not isinstance(reason, str) and not isinstance(reason, unicode): + raise BadOperationException( + 'close reason must be an instance of str or unicode') + + self._send_closing_handshake(code, reason) + self._logger.debug( + 'Sent server-initiated closing handshake (code=%r, reason=%r)', + code, reason) + + if (code == common.STATUS_GOING_AWAY or + code == common.STATUS_PROTOCOL_ERROR): + # It doesn't make sense to wait for a close frame if the reason is + # protocol error or that the server is going away. For some of + # other reasons, it might not make sense to wait for a close frame, + # but it's not clear, yet. + return + + # TODO(ukai): 2. wait until the /client terminated/ flag has been set, + # or until a server-defined timeout expires. + # + # For now, we expect receiving closing handshake right after sending + # out closing handshake. + message = self.receive_message() + if message is not None: + raise ConnectionTerminatedException( + 'Didn\'t receive valid ack for closing handshake') + # TODO: 3. close the WebSocket connection. 
+ # note: mod_python Connection (mp_conn) doesn't have close method. + + def send_ping(self, body=''): + frame = create_ping_frame( + body, + self._options.mask_send, + self._options.outgoing_frame_filters) + self._write(frame) + + self._ping_queue.append(body) + + def _send_pong(self, body): + frame = create_pong_frame( + body, + self._options.mask_send, + self._options.outgoing_frame_filters) + self._write(frame) + + def get_last_received_opcode(self): + """Returns the opcode of the WebSocket message which the last received + frame belongs to. The return value is valid iff immediately after + receive_message call. + """ + + return self._original_opcode + + def _drain_received_data(self): + """Drains unread data in the receive buffer to avoid sending out TCP + RST packet. This is because when deflate-stream is enabled, some + DEFLATE block for flushing data may follow a close frame. If any data + remains in the receive buffer of a socket when the socket is closed, + it sends out TCP RST packet to the other peer. + + Since mod_python's mp_conn object doesn't support non-blocking read, + we perform this only when pywebsocket is running in standalone mode. + """ + + # If self._options.deflate_stream is true, self._request is + # DeflateRequest, so we can get wrapped request object by + # self._request._request. + # + # Only _StandaloneRequest has _drain_received_data method. + if (self._options.deflate_stream and + ('_drain_received_data' in dir(self._request._request))): + self._request._request._drain_received_data() + + +# vi:sts=4 sw=4 et diff --git a/pyload/lib/mod_pywebsocket/common.py b/pyload/lib/mod_pywebsocket/common.py new file mode 100644 index 000000000..2388379c0 --- /dev/null +++ b/pyload/lib/mod_pywebsocket/common.py @@ -0,0 +1,307 @@ +# Copyright 2012, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +"""This file must not depend on any module specific to the WebSocket protocol. +""" + + +from mod_pywebsocket import http_header_util + + +# Additional log level definitions. 
+LOGLEVEL_FINE = 9 + +# Constants indicating WebSocket protocol version. +VERSION_HIXIE75 = -1 +VERSION_HYBI00 = 0 +VERSION_HYBI01 = 1 +VERSION_HYBI02 = 2 +VERSION_HYBI03 = 2 +VERSION_HYBI04 = 4 +VERSION_HYBI05 = 5 +VERSION_HYBI06 = 6 +VERSION_HYBI07 = 7 +VERSION_HYBI08 = 8 +VERSION_HYBI09 = 8 +VERSION_HYBI10 = 8 +VERSION_HYBI11 = 8 +VERSION_HYBI12 = 8 +VERSION_HYBI13 = 13 +VERSION_HYBI14 = 13 +VERSION_HYBI15 = 13 +VERSION_HYBI16 = 13 +VERSION_HYBI17 = 13 + +# Constants indicating WebSocket protocol latest version. +VERSION_HYBI_LATEST = VERSION_HYBI13 + +# Port numbers +DEFAULT_WEB_SOCKET_PORT = 80 +DEFAULT_WEB_SOCKET_SECURE_PORT = 443 + +# Schemes +WEB_SOCKET_SCHEME = 'ws' +WEB_SOCKET_SECURE_SCHEME = 'wss' + +# Frame opcodes defined in the spec. +OPCODE_CONTINUATION = 0x0 +OPCODE_TEXT = 0x1 +OPCODE_BINARY = 0x2 +OPCODE_CLOSE = 0x8 +OPCODE_PING = 0x9 +OPCODE_PONG = 0xa + +# UUIDs used by HyBi 04 and later opening handshake and frame masking. +WEBSOCKET_ACCEPT_UUID = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11' + +# Opening handshake header names and expected values. +UPGRADE_HEADER = 'Upgrade' +WEBSOCKET_UPGRADE_TYPE = 'websocket' +WEBSOCKET_UPGRADE_TYPE_HIXIE75 = 'WebSocket' +CONNECTION_HEADER = 'Connection' +UPGRADE_CONNECTION_TYPE = 'Upgrade' +HOST_HEADER = 'Host' +ORIGIN_HEADER = 'Origin' +SEC_WEBSOCKET_ORIGIN_HEADER = 'Sec-WebSocket-Origin' +SEC_WEBSOCKET_KEY_HEADER = 'Sec-WebSocket-Key' +SEC_WEBSOCKET_ACCEPT_HEADER = 'Sec-WebSocket-Accept' +SEC_WEBSOCKET_VERSION_HEADER = 'Sec-WebSocket-Version' +SEC_WEBSOCKET_PROTOCOL_HEADER = 'Sec-WebSocket-Protocol' +SEC_WEBSOCKET_EXTENSIONS_HEADER = 'Sec-WebSocket-Extensions' +SEC_WEBSOCKET_DRAFT_HEADER = 'Sec-WebSocket-Draft' +SEC_WEBSOCKET_KEY1_HEADER = 'Sec-WebSocket-Key1' +SEC_WEBSOCKET_KEY2_HEADER = 'Sec-WebSocket-Key2' +SEC_WEBSOCKET_LOCATION_HEADER = 'Sec-WebSocket-Location' + +# Extensions +DEFLATE_STREAM_EXTENSION = 'deflate-stream' +DEFLATE_FRAME_EXTENSION = 'deflate-frame' +PERFRAME_COMPRESSION_EXTENSION = 'perframe-compress' +PERMESSAGE_COMPRESSION_EXTENSION = 'permessage-compress' +X_WEBKIT_DEFLATE_FRAME_EXTENSION = 'x-webkit-deflate-frame' +X_WEBKIT_PERMESSAGE_COMPRESSION_EXTENSION = 'x-webkit-permessage-compress' +MUX_EXTENSION = 'mux_DO_NOT_USE' + +# Status codes +# Code STATUS_NO_STATUS_RECEIVED, STATUS_ABNORMAL_CLOSURE, and +# STATUS_TLS_HANDSHAKE are pseudo codes to indicate specific error cases. +# Could not be used for codes in actual closing frames. +# Application level errors must use codes in the range +# STATUS_USER_REGISTERED_BASE to STATUS_USER_PRIVATE_MAX. The codes in the +# range STATUS_USER_REGISTERED_BASE to STATUS_USER_REGISTERED_MAX are managed +# by IANA. Usually application must define user protocol level errors in the +# range STATUS_USER_PRIVATE_BASE to STATUS_USER_PRIVATE_MAX. +STATUS_NORMAL_CLOSURE = 1000 +STATUS_GOING_AWAY = 1001 +STATUS_PROTOCOL_ERROR = 1002 +STATUS_UNSUPPORTED_DATA = 1003 +STATUS_NO_STATUS_RECEIVED = 1005 +STATUS_ABNORMAL_CLOSURE = 1006 +STATUS_INVALID_FRAME_PAYLOAD_DATA = 1007 +STATUS_POLICY_VIOLATION = 1008 +STATUS_MESSAGE_TOO_BIG = 1009 +STATUS_MANDATORY_EXTENSION = 1010 +STATUS_INTERNAL_ENDPOINT_ERROR = 1011 +STATUS_TLS_HANDSHAKE = 1015 +STATUS_USER_REGISTERED_BASE = 3000 +STATUS_USER_REGISTERED_MAX = 3999 +STATUS_USER_PRIVATE_BASE = 4000 +STATUS_USER_PRIVATE_MAX = 4999 +# Following definitions are aliases to keep compatibility. Applications must +# not use these obsoleted definitions anymore. 
+STATUS_NORMAL = STATUS_NORMAL_CLOSURE +STATUS_UNSUPPORTED = STATUS_UNSUPPORTED_DATA +STATUS_CODE_NOT_AVAILABLE = STATUS_NO_STATUS_RECEIVED +STATUS_ABNORMAL_CLOSE = STATUS_ABNORMAL_CLOSURE +STATUS_INVALID_FRAME_PAYLOAD = STATUS_INVALID_FRAME_PAYLOAD_DATA +STATUS_MANDATORY_EXT = STATUS_MANDATORY_EXTENSION + +# HTTP status codes +HTTP_STATUS_BAD_REQUEST = 400 +HTTP_STATUS_FORBIDDEN = 403 +HTTP_STATUS_NOT_FOUND = 404 + + +def is_control_opcode(opcode): + return (opcode >> 3) == 1 + + +class ExtensionParameter(object): + """Holds information about an extension which is exchanged on extension + negotiation in opening handshake. + """ + + def __init__(self, name): + self._name = name + # TODO(tyoshino): Change the data structure to more efficient one such + # as dict when the spec changes to say like + # - Parameter names must be unique + # - The order of parameters is not significant + self._parameters = [] + + def name(self): + return self._name + + def add_parameter(self, name, value): + self._parameters.append((name, value)) + + def get_parameters(self): + return self._parameters + + def get_parameter_names(self): + return [name for name, unused_value in self._parameters] + + def has_parameter(self, name): + for param_name, param_value in self._parameters: + if param_name == name: + return True + return False + + def get_parameter_value(self, name): + for param_name, param_value in self._parameters: + if param_name == name: + return param_value + + +class ExtensionParsingException(Exception): + def __init__(self, name): + super(ExtensionParsingException, self).__init__(name) + + +def _parse_extension_param(state, definition, allow_quoted_string): + param_name = http_header_util.consume_token(state) + + if param_name is None: + raise ExtensionParsingException('No valid parameter name found') + + http_header_util.consume_lwses(state) + + if not http_header_util.consume_string(state, '='): + definition.add_parameter(param_name, None) + return + + http_header_util.consume_lwses(state) + + if allow_quoted_string: + # TODO(toyoshim): Add code to validate that parsed param_value is token + param_value = http_header_util.consume_token_or_quoted_string(state) + else: + param_value = http_header_util.consume_token(state) + if param_value is None: + raise ExtensionParsingException( + 'No valid parameter value found on the right-hand side of ' + 'parameter %r' % param_name) + + definition.add_parameter(param_name, param_value) + + +def _parse_extension(state, allow_quoted_string): + extension_token = http_header_util.consume_token(state) + if extension_token is None: + return None + + extension = ExtensionParameter(extension_token) + + while True: + http_header_util.consume_lwses(state) + + if not http_header_util.consume_string(state, ';'): + break + + http_header_util.consume_lwses(state) + + try: + _parse_extension_param(state, extension, allow_quoted_string) + except ExtensionParsingException, e: + raise ExtensionParsingException( + 'Failed to parse parameter for %r (%r)' % + (extension_token, e)) + + return extension + + +def parse_extensions(data, allow_quoted_string=False): + """Parses Sec-WebSocket-Extensions header value returns a list of + ExtensionParameter objects. + + Leading LWSes must be trimmed. 
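+
+    For example, the value
+    'deflate-frame, permessage-compress; method=deflate' yields two
+    ExtensionParameter objects: 'deflate-frame' with no parameters, and
+    'permessage-compress' with the single parameter ('method', 'deflate').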
+ """ + + state = http_header_util.ParsingState(data) + + extension_list = [] + while True: + extension = _parse_extension(state, allow_quoted_string) + if extension is not None: + extension_list.append(extension) + + http_header_util.consume_lwses(state) + + if http_header_util.peek(state) is None: + break + + if not http_header_util.consume_string(state, ','): + raise ExtensionParsingException( + 'Failed to parse Sec-WebSocket-Extensions header: ' + 'Expected a comma but found %r' % + http_header_util.peek(state)) + + http_header_util.consume_lwses(state) + + if len(extension_list) == 0: + raise ExtensionParsingException( + 'No valid extension entry found') + + return extension_list + + +def format_extension(extension): + """Formats an ExtensionParameter object.""" + + formatted_params = [extension.name()] + for param_name, param_value in extension.get_parameters(): + if param_value is None: + formatted_params.append(param_name) + else: + quoted_value = http_header_util.quote_if_necessary(param_value) + formatted_params.append('%s=%s' % (param_name, quoted_value)) + return '; '.join(formatted_params) + + +def format_extensions(extension_list): + """Formats a list of ExtensionParameter objects.""" + + formatted_extension_list = [] + for extension in extension_list: + formatted_extension_list.append(format_extension(extension)) + return ', '.join(formatted_extension_list) + + +# vi:sts=4 sw=4 et diff --git a/pyload/lib/mod_pywebsocket/dispatch.py b/pyload/lib/mod_pywebsocket/dispatch.py new file mode 100644 index 000000000..25905f180 --- /dev/null +++ b/pyload/lib/mod_pywebsocket/dispatch.py @@ -0,0 +1,387 @@ +# Copyright 2012, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +"""Dispatch WebSocket request. 
+""" + + +import logging +import os +import re + +from mod_pywebsocket import common +from mod_pywebsocket import handshake +from mod_pywebsocket import msgutil +from mod_pywebsocket import mux +from mod_pywebsocket import stream +from mod_pywebsocket import util + + +_SOURCE_PATH_PATTERN = re.compile(r'(?i)_wsh\.py$') +_SOURCE_SUFFIX = '_wsh.py' +_DO_EXTRA_HANDSHAKE_HANDLER_NAME = 'web_socket_do_extra_handshake' +_TRANSFER_DATA_HANDLER_NAME = 'web_socket_transfer_data' +_PASSIVE_CLOSING_HANDSHAKE_HANDLER_NAME = ( + 'web_socket_passive_closing_handshake') + + +class DispatchException(Exception): + """Exception in dispatching WebSocket request.""" + + def __init__(self, name, status=common.HTTP_STATUS_NOT_FOUND): + super(DispatchException, self).__init__(name) + self.status = status + + +def _default_passive_closing_handshake_handler(request): + """Default web_socket_passive_closing_handshake handler.""" + + return common.STATUS_NORMAL_CLOSURE, '' + + +def _normalize_path(path): + """Normalize path. + + Args: + path: the path to normalize. + + Path is converted to the absolute path. + The input path can use either '\\' or '/' as the separator. + The normalized path always uses '/' regardless of the platform. + """ + + path = path.replace('\\', os.path.sep) + path = os.path.realpath(path) + path = path.replace('\\', '/') + return path + + +def _create_path_to_resource_converter(base_dir): + """Returns a function that converts the path of a WebSocket handler source + file to a resource string by removing the path to the base directory from + its head, removing _SOURCE_SUFFIX from its tail, and replacing path + separators in it with '/'. + + Args: + base_dir: the path to the base directory. + """ + + base_dir = _normalize_path(base_dir) + + base_len = len(base_dir) + suffix_len = len(_SOURCE_SUFFIX) + + def converter(path): + if not path.endswith(_SOURCE_SUFFIX): + return None + # _normalize_path must not be used because resolving symlink breaks + # following path check. + path = path.replace('\\', '/') + if not path.startswith(base_dir): + return None + return path[base_len:-suffix_len] + + return converter + + +def _enumerate_handler_file_paths(directory): + """Returns a generator that enumerates WebSocket Handler source file names + in the given directory. + """ + + for root, unused_dirs, files in os.walk(directory): + for base in files: + path = os.path.join(root, base) + if _SOURCE_PATH_PATTERN.search(path): + yield path + + +class _HandlerSuite(object): + """A handler suite holder class.""" + + def __init__(self, do_extra_handshake, transfer_data, + passive_closing_handshake): + self.do_extra_handshake = do_extra_handshake + self.transfer_data = transfer_data + self.passive_closing_handshake = passive_closing_handshake + + +def _source_handler_file(handler_definition): + """Source a handler definition string. + + Args: + handler_definition: a string containing Python statements that define + handler functions. 
+ """ + + global_dic = {} + try: + exec handler_definition in global_dic + except Exception: + raise DispatchException('Error in sourcing handler:' + + util.get_stack_trace()) + passive_closing_handshake_handler = None + try: + passive_closing_handshake_handler = _extract_handler( + global_dic, _PASSIVE_CLOSING_HANDSHAKE_HANDLER_NAME) + except Exception: + passive_closing_handshake_handler = ( + _default_passive_closing_handshake_handler) + return _HandlerSuite( + _extract_handler(global_dic, _DO_EXTRA_HANDSHAKE_HANDLER_NAME), + _extract_handler(global_dic, _TRANSFER_DATA_HANDLER_NAME), + passive_closing_handshake_handler) + + +def _extract_handler(dic, name): + """Extracts a callable with the specified name from the given dictionary + dic. + """ + + if name not in dic: + raise DispatchException('%s is not defined.' % name) + handler = dic[name] + if not callable(handler): + raise DispatchException('%s is not callable.' % name) + return handler + + +class Dispatcher(object): + """Dispatches WebSocket requests. + + This class maintains a map from resource name to handlers. + """ + + def __init__( + self, root_dir, scan_dir=None, + allow_handlers_outside_root_dir=True): + """Construct an instance. + + Args: + root_dir: The directory where handler definition files are + placed. + scan_dir: The directory where handler definition files are + searched. scan_dir must be a directory under root_dir, + including root_dir itself. If scan_dir is None, + root_dir is used as scan_dir. scan_dir can be useful + in saving scan time when root_dir contains many + subdirectories. + allow_handlers_outside_root_dir: Scans handler files even if their + canonical path is not under root_dir. + """ + + self._logger = util.get_class_logger(self) + + self._handler_suite_map = {} + self._source_warnings = [] + if scan_dir is None: + scan_dir = root_dir + if not os.path.realpath(scan_dir).startswith( + os.path.realpath(root_dir)): + raise DispatchException('scan_dir:%s must be a directory under ' + 'root_dir:%s.' % (scan_dir, root_dir)) + self._source_handler_files_in_dir( + root_dir, scan_dir, allow_handlers_outside_root_dir) + + def add_resource_path_alias(self, + alias_resource_path, existing_resource_path): + """Add resource path alias. + + Once added, request to alias_resource_path would be handled by + handler registered for existing_resource_path. + + Args: + alias_resource_path: alias resource path + existing_resource_path: existing resource path + """ + try: + handler_suite = self._handler_suite_map[existing_resource_path] + self._handler_suite_map[alias_resource_path] = handler_suite + except KeyError: + raise DispatchException('No handler for: %r' % + existing_resource_path) + + def source_warnings(self): + """Return warnings in sourcing handlers.""" + + return self._source_warnings + + def do_extra_handshake(self, request): + """Do extra checking in WebSocket handshake. + + Select a handler based on request.uri and call its + web_socket_do_extra_handshake function. + + Args: + request: mod_python request. 
+ + Raises: + DispatchException: when handler was not found + AbortedByUserException: when user handler abort connection + HandshakeException: when opening handshake failed + """ + + handler_suite = self.get_handler_suite(request.ws_resource) + if handler_suite is None: + raise DispatchException('No handler for: %r' % request.ws_resource) + do_extra_handshake_ = handler_suite.do_extra_handshake + try: + do_extra_handshake_(request) + except handshake.AbortedByUserException, e: + raise + except Exception, e: + util.prepend_message_to_exception( + '%s raised exception for %s: ' % ( + _DO_EXTRA_HANDSHAKE_HANDLER_NAME, + request.ws_resource), + e) + raise handshake.HandshakeException(e, common.HTTP_STATUS_FORBIDDEN) + + def transfer_data(self, request): + """Let a handler transfer_data with a WebSocket client. + + Select a handler based on request.ws_resource and call its + web_socket_transfer_data function. + + Args: + request: mod_python request. + + Raises: + DispatchException: when handler was not found + AbortedByUserException: when user handler abort connection + """ + + # TODO(tyoshino): Terminate underlying TCP connection if possible. + try: + if mux.use_mux(request): + mux.start(request, self) + else: + handler_suite = self.get_handler_suite(request.ws_resource) + if handler_suite is None: + raise DispatchException('No handler for: %r' % + request.ws_resource) + transfer_data_ = handler_suite.transfer_data + transfer_data_(request) + + if not request.server_terminated: + request.ws_stream.close_connection() + # Catch non-critical exceptions the handler didn't handle. + except handshake.AbortedByUserException, e: + self._logger.debug('%s', e) + raise + except msgutil.BadOperationException, e: + self._logger.debug('%s', e) + request.ws_stream.close_connection(common.STATUS_ABNORMAL_CLOSURE) + except msgutil.InvalidFrameException, e: + # InvalidFrameException must be caught before + # ConnectionTerminatedException that catches InvalidFrameException. + self._logger.debug('%s', e) + request.ws_stream.close_connection(common.STATUS_PROTOCOL_ERROR) + except msgutil.UnsupportedFrameException, e: + self._logger.debug('%s', e) + request.ws_stream.close_connection(common.STATUS_UNSUPPORTED_DATA) + except stream.InvalidUTF8Exception, e: + self._logger.debug('%s', e) + request.ws_stream.close_connection( + common.STATUS_INVALID_FRAME_PAYLOAD_DATA) + except msgutil.ConnectionTerminatedException, e: + self._logger.debug('%s', e) + except Exception, e: + util.prepend_message_to_exception( + '%s raised exception for %s: ' % ( + _TRANSFER_DATA_HANDLER_NAME, request.ws_resource), + e) + raise + + def passive_closing_handshake(self, request): + """Prepare code and reason for responding client initiated closing + handshake. + """ + + handler_suite = self.get_handler_suite(request.ws_resource) + if handler_suite is None: + return _default_passive_closing_handshake_handler(request) + return handler_suite.passive_closing_handshake(request) + + def get_handler_suite(self, resource): + """Retrieves two handlers (one for extra handshake processing, and one + for data transfer) for the given request as a HandlerSuite object. + """ + + fragment = None + if '#' in resource: + resource, fragment = resource.split('#', 1) + if '?' 
in resource: + resource = resource.split('?', 1)[0] + handler_suite = self._handler_suite_map.get(resource) + if handler_suite and fragment: + raise DispatchException('Fragment identifiers MUST NOT be used on ' + 'WebSocket URIs', + common.HTTP_STATUS_BAD_REQUEST) + return handler_suite + + def _source_handler_files_in_dir( + self, root_dir, scan_dir, allow_handlers_outside_root_dir): + """Source all the handler source files in the scan_dir directory. + + The resource path is determined relative to root_dir. + """ + + # We build a map from resource to handler code assuming that there's + # only one path from root_dir to scan_dir and it can be obtained by + # comparing realpath of them. + + # Here we cannot use abspath. See + # https://bugs.webkit.org/show_bug.cgi?id=31603 + + convert = _create_path_to_resource_converter(root_dir) + scan_realpath = os.path.realpath(scan_dir) + root_realpath = os.path.realpath(root_dir) + for path in _enumerate_handler_file_paths(scan_realpath): + if (not allow_handlers_outside_root_dir and + (not os.path.realpath(path).startswith(root_realpath))): + self._logger.debug( + 'Canonical path of %s is not under root directory' % + path) + continue + try: + handler_suite = _source_handler_file(open(path).read()) + except DispatchException, e: + self._source_warnings.append('%s: %s' % (path, e)) + continue + resource = convert(path) + if resource is None: + self._logger.debug( + 'Path to resource conversion on %s failed' % path) + else: + self._handler_suite_map[convert(path)] = handler_suite + + +# vi:sts=4 sw=4 et diff --git a/pyload/lib/mod_pywebsocket/extensions.py b/pyload/lib/mod_pywebsocket/extensions.py new file mode 100644 index 000000000..03dbf9ee1 --- /dev/null +++ b/pyload/lib/mod_pywebsocket/extensions.py @@ -0,0 +1,727 @@ +# Copyright 2012, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
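+
+# Illustrative example (not from the original source): a client offering the
+# per-frame deflate extension sends a header such as
+#
+#     Sec-WebSocket-Extensions: deflate-frame; max_window_bits=10
+#
+# The header is parsed into ExtensionParameter objects, and the matching
+# processor registered below answers via get_extension_response(), returning
+# either the parameters to accept or None to reject the offer.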
+
+
+from mod_pywebsocket import common
+from mod_pywebsocket import util
+from mod_pywebsocket.http_header_util import quote_if_necessary
+
+
+_available_processors = {}
+
+
+class ExtensionProcessorInterface(object):
+
+    def name(self):
+        return None
+
+    def get_extension_response(self):
+        return None
+
+    def setup_stream_options(self, stream_options):
+        pass
+
+
+class DeflateStreamExtensionProcessor(ExtensionProcessorInterface):
+    """WebSocket DEFLATE stream extension processor.
+
+    Specification:
+    Section 9.2.1 in
+    http://tools.ietf.org/html/draft-ietf-hybi-thewebsocketprotocol-10
+    """
+
+    def __init__(self, request):
+        self._logger = util.get_class_logger(self)
+
+        self._request = request
+
+    def name(self):
+        return common.DEFLATE_STREAM_EXTENSION
+
+    def get_extension_response(self):
+        if len(self._request.get_parameter_names()) != 0:
+            return None
+
+        self._logger.debug(
+            'Enable %s extension', common.DEFLATE_STREAM_EXTENSION)
+
+        return common.ExtensionParameter(common.DEFLATE_STREAM_EXTENSION)
+
+    def setup_stream_options(self, stream_options):
+        stream_options.deflate_stream = True
+
+
+_available_processors[common.DEFLATE_STREAM_EXTENSION] = (
+    DeflateStreamExtensionProcessor)
+
+
+def _log_compression_ratio(logger, original_bytes, total_original_bytes,
+                           filtered_bytes, total_filtered_bytes):
+    # Print inf when the ratio is not available.
+    ratio = float('inf')
+    average_ratio = float('inf')
+    if original_bytes != 0:
+        ratio = float(filtered_bytes) / original_bytes
+    if total_original_bytes != 0:
+        average_ratio = (
+            float(total_filtered_bytes) / total_original_bytes)
+    logger.debug('Outgoing compress ratio: %f (average: %f)' %
+                 (ratio, average_ratio))
+
+
+def _log_decompression_ratio(logger, received_bytes, total_received_bytes,
+                             filtered_bytes, total_filtered_bytes):
+    # Print inf when the ratio is not available. Guard on the divisors to
+    # avoid a ZeroDivisionError (e.g. a compressed empty message inflates to
+    # zero bytes).
+    ratio = float('inf')
+    average_ratio = float('inf')
+    if filtered_bytes != 0:
+        ratio = float(received_bytes) / filtered_bytes
+    if total_filtered_bytes != 0:
+        average_ratio = (
+            float(total_received_bytes) / total_filtered_bytes)
+    logger.debug('Incoming compress ratio: %f (average: %f)' %
+                 (ratio, average_ratio))
+
+
+class DeflateFrameExtensionProcessor(ExtensionProcessorInterface):
+    """WebSocket Per-frame DEFLATE extension processor.
+
+    Specification:
+    http://tools.ietf.org/html/draft-tyoshino-hybi-websocket-perframe-deflate
+    """
+
+    _WINDOW_BITS_PARAM = 'max_window_bits'
+    _NO_CONTEXT_TAKEOVER_PARAM = 'no_context_takeover'
+
+    def __init__(self, request):
+        self._logger = util.get_class_logger(self)
+
+        self._request = request
+
+        self._response_window_bits = None
+        self._response_no_context_takeover = False
+        self._bfinal = False
+
+        # Counters for statistics.
+
+        # Total number of outgoing bytes supplied to this filter.
+        self._total_outgoing_payload_bytes = 0
+        # Total number of bytes sent to the network after applying this filter.
+        self._total_filtered_outgoing_payload_bytes = 0
+
+        # Total number of bytes received from the network.
+        self._total_incoming_payload_bytes = 0
+        # Total number of incoming bytes obtained after applying this filter.
+        self._total_filtered_incoming_payload_bytes = 0
+
+    def name(self):
+        return common.DEFLATE_FRAME_EXTENSION
+
+    def get_extension_response(self):
+        # Any unknown parameter is just ignored.
+
+        window_bits = self._request.get_parameter_value(
+            self._WINDOW_BITS_PARAM)
+        no_context_takeover = self._request.has_parameter(
+            self._NO_CONTEXT_TAKEOVER_PARAM)
+        if (no_context_takeover and
+            self._request.get_parameter_value(
+                self._NO_CONTEXT_TAKEOVER_PARAM) is not None):
+            return None
+
+        if window_bits is not None:
+            try:
+                window_bits = int(window_bits)
+            except ValueError, e:
+                return None
+            if window_bits < 8 or window_bits > 15:
+                return None
+
+        self._deflater = util._RFC1979Deflater(
+            window_bits, no_context_takeover)
+
+        self._inflater = util._RFC1979Inflater()
+
+        self._compress_outgoing = True
+
+        response = common.ExtensionParameter(self._request.name())
+
+        if self._response_window_bits is not None:
+            response.add_parameter(
+                self._WINDOW_BITS_PARAM, str(self._response_window_bits))
+        if self._response_no_context_takeover:
+            response.add_parameter(
+                self._NO_CONTEXT_TAKEOVER_PARAM, None)
+
+        self._logger.debug(
+            'Enable %s extension ('
+            'request: window_bits=%s; no_context_takeover=%r, '
+            'response: window_bits=%s; no_context_takeover=%r)' %
+            (self._request.name(),
+             window_bits,
+             no_context_takeover,
+             self._response_window_bits,
+             self._response_no_context_takeover))
+
+        return response
+
+    def setup_stream_options(self, stream_options):
+
+        class _OutgoingFilter(object):
+
+            def __init__(self, parent):
+                self._parent = parent
+
+            def filter(self, frame):
+                self._parent._outgoing_filter(frame)
+
+        class _IncomingFilter(object):
+
+            def __init__(self, parent):
+                self._parent = parent
+
+            def filter(self, frame):
+                self._parent._incoming_filter(frame)
+
+        stream_options.outgoing_frame_filters.append(
+            _OutgoingFilter(self))
+        stream_options.incoming_frame_filters.insert(
+            0, _IncomingFilter(self))
+
+    def set_response_window_bits(self, value):
+        self._response_window_bits = value
+
+    def set_response_no_context_takeover(self, value):
+        self._response_no_context_takeover = value
+
+    def set_bfinal(self, value):
+        self._bfinal = value
+
+    def enable_outgoing_compression(self):
+        self._compress_outgoing = True
+
+    def disable_outgoing_compression(self):
+        self._compress_outgoing = False
+
+    def _outgoing_filter(self, frame):
+        """Transform outgoing frames. This method is called only by
+        an _OutgoingFilter instance.
+        """
+
+        original_payload_size = len(frame.payload)
+        self._total_outgoing_payload_bytes += original_payload_size
+
+        if (not self._compress_outgoing or
+            common.is_control_opcode(frame.opcode)):
+            self._total_filtered_outgoing_payload_bytes += (
+                original_payload_size)
+            return
+
+        frame.payload = self._deflater.filter(
+            frame.payload, bfinal=self._bfinal)
+        frame.rsv1 = 1
+
+        filtered_payload_size = len(frame.payload)
+        self._total_filtered_outgoing_payload_bytes += filtered_payload_size
+
+        _log_compression_ratio(self._logger, original_payload_size,
+                               self._total_outgoing_payload_bytes,
+                               filtered_payload_size,
+                               self._total_filtered_outgoing_payload_bytes)
+
+    def _incoming_filter(self, frame):
+        """Transform incoming frames. This method is called only by
+        an _IncomingFilter instance.
+ """ + + received_payload_size = len(frame.payload) + self._total_incoming_payload_bytes += received_payload_size + + if frame.rsv1 != 1 or common.is_control_opcode(frame.opcode): + self._total_filtered_incoming_payload_bytes += ( + received_payload_size) + return + + frame.payload = self._inflater.filter(frame.payload) + frame.rsv1 = 0 + + filtered_payload_size = len(frame.payload) + self._total_filtered_incoming_payload_bytes += filtered_payload_size + + _log_decompression_ratio(self._logger, received_payload_size, + self._total_incoming_payload_bytes, + filtered_payload_size, + self._total_filtered_incoming_payload_bytes) + + +_available_processors[common.DEFLATE_FRAME_EXTENSION] = ( + DeflateFrameExtensionProcessor) + + +# Adding vendor-prefixed deflate-frame extension. +# TODO(bashi): Remove this after WebKit stops using vendor prefix. +_available_processors[common.X_WEBKIT_DEFLATE_FRAME_EXTENSION] = ( + DeflateFrameExtensionProcessor) + + +def _parse_compression_method(data): + """Parses the value of "method" extension parameter.""" + + return common.parse_extensions(data, allow_quoted_string=True) + + +def _create_accepted_method_desc(method_name, method_params): + """Creates accepted-method-desc from given method name and parameters""" + + extension = common.ExtensionParameter(method_name) + for name, value in method_params: + extension.add_parameter(name, value) + return common.format_extension(extension) + + +class CompressionExtensionProcessorBase(ExtensionProcessorInterface): + """Base class for Per-frame and Per-message compression extension.""" + + _METHOD_PARAM = 'method' + + def __init__(self, request): + self._logger = util.get_class_logger(self) + self._request = request + self._compression_method_name = None + self._compression_processor = None + self._compression_processor_hook = None + + def name(self): + return '' + + def _lookup_compression_processor(self, method_desc): + return None + + def _get_compression_processor_response(self): + """Looks up the compression processor based on the self._request and + returns the compression processor's response. + """ + + method_list = self._request.get_parameter_value(self._METHOD_PARAM) + if method_list is None: + return None + methods = _parse_compression_method(method_list) + if methods is None: + return None + comression_processor = None + # The current implementation tries only the first method that matches + # supported algorithm. Following methods aren't tried even if the + # first one is rejected. + # TODO(bashi): Need to clarify this behavior. 
+ for method_desc in methods: + compression_processor = self._lookup_compression_processor( + method_desc) + if compression_processor is not None: + self._compression_method_name = method_desc.name() + break + if compression_processor is None: + return None + + if self._compression_processor_hook: + self._compression_processor_hook(compression_processor) + + processor_response = compression_processor.get_extension_response() + if processor_response is None: + return None + self._compression_processor = compression_processor + return processor_response + + def get_extension_response(self): + processor_response = self._get_compression_processor_response() + if processor_response is None: + return None + + response = common.ExtensionParameter(self._request.name()) + accepted_method_desc = _create_accepted_method_desc( + self._compression_method_name, + processor_response.get_parameters()) + response.add_parameter(self._METHOD_PARAM, accepted_method_desc) + self._logger.debug( + 'Enable %s extension (method: %s)' % + (self._request.name(), self._compression_method_name)) + return response + + def setup_stream_options(self, stream_options): + if self._compression_processor is None: + return + self._compression_processor.setup_stream_options(stream_options) + + def set_compression_processor_hook(self, hook): + self._compression_processor_hook = hook + + def get_compression_processor(self): + return self._compression_processor + + +class PerFrameCompressionExtensionProcessor(CompressionExtensionProcessorBase): + """WebSocket Per-frame compression extension processor. + + Specification: + http://tools.ietf.org/html/draft-ietf-hybi-websocket-perframe-compression + """ + + _DEFLATE_METHOD = 'deflate' + + def __init__(self, request): + CompressionExtensionProcessorBase.__init__(self, request) + + def name(self): + return common.PERFRAME_COMPRESSION_EXTENSION + + def _lookup_compression_processor(self, method_desc): + if method_desc.name() == self._DEFLATE_METHOD: + return DeflateFrameExtensionProcessor(method_desc) + return None + + +_available_processors[common.PERFRAME_COMPRESSION_EXTENSION] = ( + PerFrameCompressionExtensionProcessor) + + +class DeflateMessageProcessor(ExtensionProcessorInterface): + """Per-message deflate processor.""" + + _S2C_MAX_WINDOW_BITS_PARAM = 's2c_max_window_bits' + _S2C_NO_CONTEXT_TAKEOVER_PARAM = 's2c_no_context_takeover' + _C2S_MAX_WINDOW_BITS_PARAM = 'c2s_max_window_bits' + _C2S_NO_CONTEXT_TAKEOVER_PARAM = 'c2s_no_context_takeover' + + def __init__(self, request): + self._request = request + self._logger = util.get_class_logger(self) + + self._c2s_max_window_bits = None + self._c2s_no_context_takeover = False + self._bfinal = False + + self._compress_outgoing_enabled = False + + # True if a message is fragmented and compression is ongoing. + self._compress_ongoing = False + + # Counters for statistics. + + # Total number of outgoing bytes supplied to this filter. + self._total_outgoing_payload_bytes = 0 + # Total number of bytes sent to the network after applying this filter. + self._total_filtered_outgoing_payload_bytes = 0 + + # Total number of bytes received from the network. + self._total_incoming_payload_bytes = 0 + # Total number of incoming bytes obtained after applying this filter. + self._total_filtered_incoming_payload_bytes = 0 + + def name(self): + return 'deflate' + + def get_extension_response(self): + # Any unknown parameter will be just ignored. 
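+        # Illustrative offer (not from the original source):
+        #     deflate; s2c_max_window_bits=12; s2c_no_context_takeover
+        # asks the server to cap its compression window at 2^12 bytes and to
+        # reset the compression context after every message.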
+ + s2c_max_window_bits = self._request.get_parameter_value( + self._S2C_MAX_WINDOW_BITS_PARAM) + if s2c_max_window_bits is not None: + try: + s2c_max_window_bits = int(s2c_max_window_bits) + except ValueError, e: + return None + if s2c_max_window_bits < 8 or s2c_max_window_bits > 15: + return None + + s2c_no_context_takeover = self._request.has_parameter( + self._S2C_NO_CONTEXT_TAKEOVER_PARAM) + if (s2c_no_context_takeover and + self._request.get_parameter_value( + self._S2C_NO_CONTEXT_TAKEOVER_PARAM) is not None): + return None + + self._deflater = util._RFC1979Deflater( + s2c_max_window_bits, s2c_no_context_takeover) + + self._inflater = util._RFC1979Inflater() + + self._compress_outgoing_enabled = True + + response = common.ExtensionParameter(self._request.name()) + + if s2c_max_window_bits is not None: + response.add_parameter( + self._S2C_MAX_WINDOW_BITS_PARAM, str(s2c_max_window_bits)) + + if s2c_no_context_takeover: + response.add_parameter( + self._S2C_NO_CONTEXT_TAKEOVER_PARAM, None) + + if self._c2s_max_window_bits is not None: + response.add_parameter( + self._C2S_MAX_WINDOW_BITS_PARAM, + str(self._c2s_max_window_bits)) + if self._c2s_no_context_takeover: + response.add_parameter( + self._C2S_NO_CONTEXT_TAKEOVER_PARAM, None) + + self._logger.debug( + 'Enable %s extension (' + 'request: s2c_max_window_bits=%s; s2c_no_context_takeover=%r, ' + 'response: c2s_max_window_bits=%s; c2s_no_context_takeover=%r)' % + (self._request.name(), + s2c_max_window_bits, + s2c_no_context_takeover, + self._c2s_max_window_bits, + self._c2s_no_context_takeover)) + + return response + + def setup_stream_options(self, stream_options): + class _OutgoingMessageFilter(object): + + def __init__(self, parent): + self._parent = parent + + def filter(self, message, end=True, binary=False): + return self._parent._process_outgoing_message( + message, end, binary) + + class _IncomingMessageFilter(object): + + def __init__(self, parent): + self._parent = parent + self._decompress_next_message = False + + def decompress_next_message(self): + self._decompress_next_message = True + + def filter(self, message): + message = self._parent._process_incoming_message( + message, self._decompress_next_message) + self._decompress_next_message = False + return message + + self._outgoing_message_filter = _OutgoingMessageFilter(self) + self._incoming_message_filter = _IncomingMessageFilter(self) + stream_options.outgoing_message_filters.append( + self._outgoing_message_filter) + stream_options.incoming_message_filters.append( + self._incoming_message_filter) + + class _OutgoingFrameFilter(object): + + def __init__(self, parent): + self._parent = parent + self._set_compression_bit = False + + def set_compression_bit(self): + self._set_compression_bit = True + + def filter(self, frame): + self._parent._process_outgoing_frame( + frame, self._set_compression_bit) + self._set_compression_bit = False + + class _IncomingFrameFilter(object): + + def __init__(self, parent): + self._parent = parent + + def filter(self, frame): + self._parent._process_incoming_frame(frame) + + self._outgoing_frame_filter = _OutgoingFrameFilter(self) + self._incoming_frame_filter = _IncomingFrameFilter(self) + stream_options.outgoing_frame_filters.append( + self._outgoing_frame_filter) + stream_options.incoming_frame_filters.append( + self._incoming_frame_filter) + + stream_options.encode_text_message_to_utf8 = False + + def set_c2s_max_window_bits(self, value): + self._c2s_max_window_bits = value + + def set_c2s_no_context_takeover(self, value): + 
self._c2s_no_context_takeover = value + + def set_bfinal(self, value): + self._bfinal = value + + def enable_outgoing_compression(self): + self._compress_outgoing_enabled = True + + def disable_outgoing_compression(self): + self._compress_outgoing_enabled = False + + def _process_incoming_message(self, message, decompress): + if not decompress: + return message + + received_payload_size = len(message) + self._total_incoming_payload_bytes += received_payload_size + + message = self._inflater.filter(message) + + filtered_payload_size = len(message) + self._total_filtered_incoming_payload_bytes += filtered_payload_size + + _log_decompression_ratio(self._logger, received_payload_size, + self._total_incoming_payload_bytes, + filtered_payload_size, + self._total_filtered_incoming_payload_bytes) + + return message + + def _process_outgoing_message(self, message, end, binary): + if not binary: + message = message.encode('utf-8') + + if not self._compress_outgoing_enabled: + return message + + original_payload_size = len(message) + self._total_outgoing_payload_bytes += original_payload_size + + message = self._deflater.filter( + message, flush=end, bfinal=self._bfinal) + + filtered_payload_size = len(message) + self._total_filtered_outgoing_payload_bytes += filtered_payload_size + + _log_compression_ratio(self._logger, original_payload_size, + self._total_outgoing_payload_bytes, + filtered_payload_size, + self._total_filtered_outgoing_payload_bytes) + + if not self._compress_ongoing: + self._outgoing_frame_filter.set_compression_bit() + self._compress_ongoing = not end + return message + + def _process_incoming_frame(self, frame): + if frame.rsv1 == 1 and not common.is_control_opcode(frame.opcode): + self._incoming_message_filter.decompress_next_message() + frame.rsv1 = 0 + + def _process_outgoing_frame(self, frame, compression_bit): + if (not compression_bit or + common.is_control_opcode(frame.opcode)): + return + + frame.rsv1 = 1 + + +class PerMessageCompressionExtensionProcessor( + CompressionExtensionProcessorBase): + """WebSocket Per-message compression extension processor. + + Specification: + http://tools.ietf.org/html/draft-ietf-hybi-permessage-compression + """ + + _DEFLATE_METHOD = 'deflate' + + def __init__(self, request): + CompressionExtensionProcessorBase.__init__(self, request) + + def name(self): + return common.PERMESSAGE_COMPRESSION_EXTENSION + + def _lookup_compression_processor(self, method_desc): + if method_desc.name() == self._DEFLATE_METHOD: + return DeflateMessageProcessor(method_desc) + return None + + +_available_processors[common.PERMESSAGE_COMPRESSION_EXTENSION] = ( + PerMessageCompressionExtensionProcessor) + + +# Adding vendor-prefixed permessage-compress extension. +# TODO(bashi): Remove this after WebKit stops using vendor prefix. +_available_processors[common.X_WEBKIT_PERMESSAGE_COMPRESSION_EXTENSION] = ( + PerMessageCompressionExtensionProcessor) + + +class MuxExtensionProcessor(ExtensionProcessorInterface): + """WebSocket multiplexing extension processor.""" + + _QUOTA_PARAM = 'quota' + + def __init__(self, request): + self._request = request + + def name(self): + return common.MUX_EXTENSION + + def get_extension_response(self, ws_request, + logical_channel_extensions): + # Mux extension cannot be used after extensions that depend on + # frame boundary, extension data field, or any reserved bits + # which are attributed to each frame. 
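+        # Illustrative: if deflate-frame (which sets the per-frame RSV1 bit)
+        # was accepted for the logical channels, the check below rejects mux.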
+        for extension in logical_channel_extensions:
+            name = extension.name()
+            if (name == common.PERFRAME_COMPRESSION_EXTENSION or
+                name == common.DEFLATE_FRAME_EXTENSION or
+                name == common.X_WEBKIT_DEFLATE_FRAME_EXTENSION):
+                return None
+
+        quota = self._request.get_parameter_value(self._QUOTA_PARAM)
+        if quota is None:
+            ws_request.mux_quota = 0
+        else:
+            try:
+                quota = int(quota)
+            except ValueError, e:
+                return None
+            if quota < 0 or quota >= 2 ** 32:
+                return None
+            ws_request.mux_quota = quota
+
+        ws_request.mux = True
+        ws_request.mux_extensions = logical_channel_extensions
+        return common.ExtensionParameter(common.MUX_EXTENSION)
+
+    def setup_stream_options(self, stream_options):
+        pass
+
+
+_available_processors[common.MUX_EXTENSION] = MuxExtensionProcessor
+
+
+def get_extension_processor(extension_request):
+    global _available_processors
+    processor_class = _available_processors.get(extension_request.name())
+    if processor_class is None:
+        return None
+    return processor_class(extension_request)
+
+
+# vi:sts=4 sw=4 et
diff --git a/pyload/lib/mod_pywebsocket/handshake/__init__.py b/pyload/lib/mod_pywebsocket/handshake/__init__.py
new file mode 100644
index 000000000..194f6b395
--- /dev/null
+++ b/pyload/lib/mod_pywebsocket/handshake/__init__.py
@@ -0,0 +1,110 @@
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+"""WebSocket opening handshake processor. This module tries to apply
+available opening handshake processors for each protocol version until a
+connection is successfully established.
+"""
+
+
+import logging
+
+from mod_pywebsocket import common
+from mod_pywebsocket.handshake import hybi00
+from mod_pywebsocket.handshake import hybi
+# Export the AbortedByUserException, HandshakeException, and VersionException
+# symbols from this module.
+from mod_pywebsocket.handshake._base import AbortedByUserException
+from mod_pywebsocket.handshake._base import HandshakeException
+from mod_pywebsocket.handshake._base import VersionException
+
+
+_LOGGER = logging.getLogger(__name__)
+
+
+def do_handshake(request, dispatcher, allowDraft75=False, strict=False):
+    """Performs WebSocket handshake.
+
+    Args:
+        request: mod_python request.
+        dispatcher: Dispatcher (dispatch.Dispatcher).
+        allowDraft75: obsolete argument. ignored.
+        strict: obsolete argument. ignored.
+
+    Handshaker will add attributes such as ws_resource in performing
+    handshake.
+    """
+
+    _LOGGER.debug('Client\'s opening handshake resource: %r', request.uri)
+    # To print a mimetools.Message as an escaped one-line string, we convert
+    # headers_in to a dict object. Without conversion, %r just prints the
+    # type and address, and %s prints the original header string as multiple
+    # lines.
+    #
+    # Both mimetools.Message and MpTable_Type of mod_python can be
+    # converted to dict.
+    #
+    # mimetools.Message.__str__ returns the original header string.
+    # dict(mimetools.Message object) returns the map from header names to
+    # header values. MpTable_Type has no such __str__, only a __repr__ that
+    # formats it like a dictionary object.
+    _LOGGER.debug(
+        'Client\'s opening handshake headers: %r', dict(request.headers_in))
+
+    handshakers = []
+    handshakers.append(
+        ('RFC 6455', hybi.Handshaker(request, dispatcher)))
+    handshakers.append(
+        ('HyBi 00', hybi00.Handshaker(request, dispatcher)))
+
+    for name, handshaker in handshakers:
+        _LOGGER.debug('Trying protocol version %s', name)
+        try:
+            handshaker.do_handshake()
+            _LOGGER.info('Established (%s protocol)', name)
+            return
+        except HandshakeException, e:
+            _LOGGER.debug(
+                'Failed to complete opening handshake as %s protocol: %r',
+                name, e)
+            if e.status:
+                raise e
+        except AbortedByUserException, e:
+            raise
+        except VersionException, e:
+            raise
+
+    # TODO(toyoshim): Add a test to cover the case all handshakers fail.
+    raise HandshakeException(
+        'Failed to complete opening handshake for all available protocols',
+        status=common.HTTP_STATUS_BAD_REQUEST)
+
+
+# vi:sts=4 sw=4 et
diff --git a/pyload/lib/mod_pywebsocket/handshake/_base.py b/pyload/lib/mod_pywebsocket/handshake/_base.py
new file mode 100644
index 000000000..e5c94ca90
--- /dev/null
+++ b/pyload/lib/mod_pywebsocket/handshake/_base.py
@@ -0,0 +1,226 @@
+# Copyright 2012, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+"""Common functions and exceptions used by WebSocket opening handshake
+processors.
+"""
+
+
+from mod_pywebsocket import common
+from mod_pywebsocket import http_header_util
+
+
+class AbortedByUserException(Exception):
+    """Exception for aborting a connection intentionally.
+
+    If this exception is raised in the do_extra_handshake handler, the
+    connection will be abandoned. No other WebSocket or HTTP(S) handler will
+    be invoked.
+
+    If this exception is raised in the transfer_data handler, the connection
+    will be closed without a closing handshake. No other WebSocket or HTTP(S)
+    handler will be invoked.
+    """
+
+    pass
+
+
+class HandshakeException(Exception):
+    """This exception will be raised when an error occurs while processing
+    the WebSocket opening handshake.
+    """
+
+    def __init__(self, name, status=None):
+        super(HandshakeException, self).__init__(name)
+        self.status = status
+
+
+class VersionException(Exception):
+    """This exception will be raised when the version of a client request
+    does not match a version the server supports.
+    """
+
+    def __init__(self, name, supported_versions=''):
+        """Construct an instance.
+
+        Args:
+            supported_versions: a str object listing the supported hybi
+                versions (e.g. '8, 13').
+        """
+        super(VersionException, self).__init__(name)
+        self.supported_versions = supported_versions
+
+
+def get_default_port(is_secure):
+    if is_secure:
+        return common.DEFAULT_WEB_SOCKET_SECURE_PORT
+    else:
+        return common.DEFAULT_WEB_SOCKET_PORT
+
+
+def validate_subprotocol(subprotocol, hixie):
+    """Validate a value in the Sec-WebSocket-Protocol field.
+
+    See
+    - RFC 6455: Section 4.1., 4.2.2., and 4.3.
+    - HyBi 00: Section 4.1. Opening handshake
+
+    Args:
+        hixie: if True, checks whether the characters in subprotocol are in
+            the range between U+0020 and U+007E. This is required by HyBi 00
+            but not by RFC 6455.
+    """
+
+    if not subprotocol:
+        raise HandshakeException('Invalid subprotocol name: empty')
+    if hixie:
+        # Parameter should be in the range U+0020 to U+007E.
+        for c in subprotocol:
+            if not 0x20 <= ord(c) <= 0x7e:
+                raise HandshakeException(
+                    'Illegal character in subprotocol name: %r' % c)
+    else:
+        # Parameter should be an encoded HTTP token.
+        state = http_header_util.ParsingState(subprotocol)
+        token = http_header_util.consume_token(state)
+        rest = http_header_util.peek(state)
+        # If |rest| is not None, |subprotocol| is not a single token or is
+        # invalid. If |rest| is None, |token| must not be None because
+        # |subprotocol| is the concatenation of |token| and |rest| and is
+        # not None.
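+        # Illustrative: 'chat' is consumed as a single token and passes,
+        # while 'chat v1' stops token parsing at the space, leaving rest
+        # non-None, and is rejected below.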
+ if rest is not None: + raise HandshakeException('Invalid non-token string in subprotocol ' + 'name: %r' % rest) + + +def parse_host_header(request): + fields = request.headers_in['Host'].split(':', 1) + if len(fields) == 1: + return fields[0], get_default_port(request.is_https()) + try: + return fields[0], int(fields[1]) + except ValueError, e: + raise HandshakeException('Invalid port number format: %r' % e) + + +def format_header(name, value): + return '%s: %s\r\n' % (name, value) + + +def build_location(request): + """Build WebSocket location for request.""" + location_parts = [] + if request.is_https(): + location_parts.append(common.WEB_SOCKET_SECURE_SCHEME) + else: + location_parts.append(common.WEB_SOCKET_SCHEME) + location_parts.append('://') + host, port = parse_host_header(request) + connection_port = request.connection.local_addr[1] + if port != connection_port: + raise HandshakeException('Header/connection port mismatch: %d/%d' % + (port, connection_port)) + location_parts.append(host) + if (port != get_default_port(request.is_https())): + location_parts.append(':') + location_parts.append(str(port)) + location_parts.append(request.uri) + return ''.join(location_parts) + + +def get_mandatory_header(request, key): + value = request.headers_in.get(key) + if value is None: + raise HandshakeException('Header %s is not defined' % key) + return value + + +def validate_mandatory_header(request, key, expected_value, fail_status=None): + value = get_mandatory_header(request, key) + + if value.lower() != expected_value.lower(): + raise HandshakeException( + 'Expected %r for header %s but found %r (case-insensitive)' % + (expected_value, key, value), status=fail_status) + + +def check_request_line(request): + # 5.1 1. The three character UTF-8 string "GET". + # 5.1 2. A UTF-8-encoded U+0020 SPACE character (0x20 byte). + if request.method != 'GET': + raise HandshakeException('Method is not GET: %r' % request.method) + + if request.protocol != 'HTTP/1.1': + raise HandshakeException('Version is not HTTP/1.1: %r' % + request.protocol) + + +def check_header_lines(request, mandatory_headers): + check_request_line(request) + + # The expected field names, and the meaning of their corresponding + # values, are as follows. + # |Upgrade| and |Connection| + for key, expected_value in mandatory_headers: + validate_mandatory_header(request, key, expected_value) + + +def parse_token_list(data): + """Parses a header value which follows 1#token and returns parsed elements + as a list of strings. + + Leading LWSes must be trimmed. + """ + + state = http_header_util.ParsingState(data) + + token_list = [] + + while True: + token = http_header_util.consume_token(state) + if token is not None: + token_list.append(token) + + http_header_util.consume_lwses(state) + + if http_header_util.peek(state) is None: + break + + if not http_header_util.consume_string(state, ','): + raise HandshakeException( + 'Expected a comma but found %r' % http_header_util.peek(state)) + + http_header_util.consume_lwses(state) + + if len(token_list) == 0: + raise HandshakeException('No valid token found') + + return token_list + + +# vi:sts=4 sw=4 et diff --git a/pyload/lib/mod_pywebsocket/handshake/hybi.py b/pyload/lib/mod_pywebsocket/handshake/hybi.py new file mode 100644 index 000000000..fc0e2a096 --- /dev/null +++ b/pyload/lib/mod_pywebsocket/handshake/hybi.py @@ -0,0 +1,404 @@ +# Copyright 2012, Google Inc. +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +"""This file provides the opening handshake processor for the WebSocket +protocol (RFC 6455). + +Specification: +http://tools.ietf.org/html/rfc6455 +""" + + +# Note: request.connection.write is used in this module, even though mod_python +# document says that it should be used only in connection handlers. +# Unfortunately, we have no other options. For example, request.write is not +# suitable because it doesn't allow direct raw bytes writing. + + +import base64 +import logging +import os +import re + +from mod_pywebsocket import common +from mod_pywebsocket.extensions import get_extension_processor +from mod_pywebsocket.handshake._base import check_request_line +from mod_pywebsocket.handshake._base import format_header +from mod_pywebsocket.handshake._base import get_mandatory_header +from mod_pywebsocket.handshake._base import HandshakeException +from mod_pywebsocket.handshake._base import parse_token_list +from mod_pywebsocket.handshake._base import validate_mandatory_header +from mod_pywebsocket.handshake._base import validate_subprotocol +from mod_pywebsocket.handshake._base import VersionException +from mod_pywebsocket.stream import Stream +from mod_pywebsocket.stream import StreamOptions +from mod_pywebsocket import util + + +# Used to validate the value in the Sec-WebSocket-Key header strictly. RFC 4648 +# disallows non-zero padding, so the character right before == must be any of +# A, Q, g and w. +_SEC_WEBSOCKET_KEY_REGEX = re.compile('^[+/0-9A-Za-z]{21}[AQgw]==$') + +# Defining aliases for values used frequently. +_VERSION_HYBI08 = common.VERSION_HYBI08 +_VERSION_HYBI08_STRING = str(_VERSION_HYBI08) +_VERSION_LATEST = common.VERSION_HYBI_LATEST +_VERSION_LATEST_STRING = str(_VERSION_LATEST) +_SUPPORTED_VERSIONS = [ + _VERSION_LATEST, + _VERSION_HYBI08, +] + + +def compute_accept(key): + """Computes value for the Sec-WebSocket-Accept header from value of the + Sec-WebSocket-Key header. 
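+
+    Example (the well-known pair from RFC 6455, Section 1.3): for the key
+    'dGhlIHNhbXBsZSBub25jZQ==' the computed accept value is
+    's3pPLMBiTxaQ9kYGzzhZRbK+xOo='.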
+ """ + + accept_binary = util.sha1_hash( + key + common.WEBSOCKET_ACCEPT_UUID).digest() + accept = base64.b64encode(accept_binary) + + return (accept, accept_binary) + + +class Handshaker(object): + """Opening handshake processor for the WebSocket protocol (RFC 6455).""" + + def __init__(self, request, dispatcher): + """Construct an instance. + + Args: + request: mod_python request. + dispatcher: Dispatcher (dispatch.Dispatcher). + + Handshaker will add attributes such as ws_resource during handshake. + """ + + self._logger = util.get_class_logger(self) + + self._request = request + self._dispatcher = dispatcher + + def _validate_connection_header(self): + connection = get_mandatory_header( + self._request, common.CONNECTION_HEADER) + + try: + connection_tokens = parse_token_list(connection) + except HandshakeException, e: + raise HandshakeException( + 'Failed to parse %s: %s' % (common.CONNECTION_HEADER, e)) + + connection_is_valid = False + for token in connection_tokens: + if token.lower() == common.UPGRADE_CONNECTION_TYPE.lower(): + connection_is_valid = True + break + if not connection_is_valid: + raise HandshakeException( + '%s header doesn\'t contain "%s"' % + (common.CONNECTION_HEADER, common.UPGRADE_CONNECTION_TYPE)) + + def do_handshake(self): + self._request.ws_close_code = None + self._request.ws_close_reason = None + + # Parsing. + + check_request_line(self._request) + + validate_mandatory_header( + self._request, + common.UPGRADE_HEADER, + common.WEBSOCKET_UPGRADE_TYPE) + + self._validate_connection_header() + + self._request.ws_resource = self._request.uri + + unused_host = get_mandatory_header(self._request, common.HOST_HEADER) + + self._request.ws_version = self._check_version() + + # This handshake must be based on latest hybi. We are responsible to + # fallback to HTTP on handshake failure as latest hybi handshake + # specifies. + try: + self._get_origin() + self._set_protocol() + self._parse_extensions() + + # Key validation, response generation. + + key = self._get_key() + (accept, accept_binary) = compute_accept(key) + self._logger.debug( + '%s: %r (%s)', + common.SEC_WEBSOCKET_ACCEPT_HEADER, + accept, + util.hexify(accept_binary)) + + self._logger.debug('Protocol version is RFC 6455') + + # Setup extension processors. + + processors = [] + if self._request.ws_requested_extensions is not None: + for extension_request in self._request.ws_requested_extensions: + processor = get_extension_processor(extension_request) + # Unknown extension requests are just ignored. + if processor is not None: + processors.append(processor) + self._request.ws_extension_processors = processors + + # Extra handshake handler may modify/remove processors. + self._dispatcher.do_extra_handshake(self._request) + processors = filter(lambda processor: processor is not None, + self._request.ws_extension_processors) + + accepted_extensions = [] + + # We need to take care of mux extension here. Extensions that + # are placed before mux should be applied to logical channels. + mux_index = -1 + for i, processor in enumerate(processors): + if processor.name() == common.MUX_EXTENSION: + mux_index = i + break + if mux_index >= 0: + mux_processor = processors[mux_index] + logical_channel_processors = processors[:mux_index] + processors = processors[mux_index+1:] + + for processor in logical_channel_processors: + extension_response = processor.get_extension_response() + if extension_response is None: + # Rejected. 
+                        continue
+                    accepted_extensions.append(extension_response)
+                # Pass a shallow copy of accepted_extensions as extensions for
+                # logical channels.
+                mux_response = mux_processor.get_extension_response(
+                    self._request, accepted_extensions[:])
+                if mux_response is not None:
+                    accepted_extensions.append(mux_response)
+
+            stream_options = StreamOptions()
+
+            # When the mux extension is present, |processors| here contains
+            # only processors for extensions placed after mux.
+            for processor in processors:
+
+                extension_response = processor.get_extension_response()
+                if extension_response is None:
+                    # Rejected.
+                    continue
+
+                accepted_extensions.append(extension_response)
+
+                processor.setup_stream_options(stream_options)
+
+            if len(accepted_extensions) > 0:
+                self._request.ws_extensions = accepted_extensions
+                self._logger.debug(
+                    'Extensions accepted: %r',
+                    map(common.ExtensionParameter.name, accepted_extensions))
+            else:
+                self._request.ws_extensions = None
+
+            self._request.ws_stream = self._create_stream(stream_options)
+
+            if self._request.ws_requested_protocols is not None:
+                if self._request.ws_protocol is None:
+                    raise HandshakeException(
+                        'do_extra_handshake must choose one subprotocol from '
+                        'ws_requested_protocols and set it to ws_protocol')
+                validate_subprotocol(self._request.ws_protocol, hixie=False)
+
+                self._logger.debug(
+                    'Subprotocol accepted: %r',
+                    self._request.ws_protocol)
+            else:
+                if self._request.ws_protocol is not None:
+                    raise HandshakeException(
+                        'ws_protocol must be None when the client didn\'t '
+                        'request any subprotocol')
+
+            self._send_handshake(accept)
+        except HandshakeException, e:
+            if not e.status:
+                # Fall back to 400 Bad Request by default.
+                e.status = common.HTTP_STATUS_BAD_REQUEST
+            raise e
+
+    def _get_origin(self):
+        if self._request.ws_version is _VERSION_HYBI08:
+            origin_header = common.SEC_WEBSOCKET_ORIGIN_HEADER
+        else:
+            origin_header = common.ORIGIN_HEADER
+        origin = self._request.headers_in.get(origin_header)
+        if origin is None:
+            self._logger.debug('Client request does not have origin header')
+        self._request.ws_origin = origin
+
+    def _check_version(self):
+        version = get_mandatory_header(self._request,
+                                       common.SEC_WEBSOCKET_VERSION_HEADER)
+        if version == _VERSION_HYBI08_STRING:
+            return _VERSION_HYBI08
+        if version == _VERSION_LATEST_STRING:
+            return _VERSION_LATEST
+
+        if version.find(',') >= 0:
+            raise HandshakeException(
+                'Multiple versions (%r) are not allowed for header %s' %
+                (version, common.SEC_WEBSOCKET_VERSION_HEADER),
+                status=common.HTTP_STATUS_BAD_REQUEST)
+        raise VersionException(
+            'Unsupported version %r for header %s' %
+            (version, common.SEC_WEBSOCKET_VERSION_HEADER),
+            supported_versions=', '.join(map(str, _SUPPORTED_VERSIONS)))
+
+    def _set_protocol(self):
+        self._request.ws_protocol = None
+
+        protocol_header = self._request.headers_in.get(
+            common.SEC_WEBSOCKET_PROTOCOL_HEADER)
+
+        if protocol_header is None:
+            self._request.ws_requested_protocols = None
+            return
+
+        self._request.ws_requested_protocols = parse_token_list(
+            protocol_header)
+        self._logger.debug('Subprotocols requested: %r',
+                           self._request.ws_requested_protocols)
+
+    def _parse_extensions(self):
+        extensions_header = self._request.headers_in.get(
+            common.SEC_WEBSOCKET_EXTENSIONS_HEADER)
+        if not extensions_header:
+            self._request.ws_requested_extensions = None
+            return
+
+        if self._request.ws_version is common.VERSION_HYBI08:
+            allow_quoted_string = False
+        else:
+            allow_quoted_string = True
+        try:
+            self._request.ws_requested_extensions = 
common.parse_extensions( + extensions_header, allow_quoted_string=allow_quoted_string) + except common.ExtensionParsingException, e: + raise HandshakeException( + 'Failed to parse Sec-WebSocket-Extensions header: %r' % e) + + self._logger.debug( + 'Extensions requested: %r', + map(common.ExtensionParameter.name, + self._request.ws_requested_extensions)) + + def _validate_key(self, key): + if key.find(',') >= 0: + raise HandshakeException('Request has multiple %s header lines or ' + 'contains illegal character \',\': %r' % + (common.SEC_WEBSOCKET_KEY_HEADER, key)) + + # Validate + key_is_valid = False + try: + # Validate key by quick regex match before parsing by base64 + # module. Because base64 module skips invalid characters, we have + # to do this in advance to make this server strictly reject illegal + # keys. + if _SEC_WEBSOCKET_KEY_REGEX.match(key): + decoded_key = base64.b64decode(key) + if len(decoded_key) == 16: + key_is_valid = True + except TypeError, e: + pass + + if not key_is_valid: + raise HandshakeException( + 'Illegal value for header %s: %r' % + (common.SEC_WEBSOCKET_KEY_HEADER, key)) + + return decoded_key + + def _get_key(self): + key = get_mandatory_header( + self._request, common.SEC_WEBSOCKET_KEY_HEADER) + + decoded_key = self._validate_key(key) + + self._logger.debug( + '%s: %r (%s)', + common.SEC_WEBSOCKET_KEY_HEADER, + key, + util.hexify(decoded_key)) + + return key + + def _create_stream(self, stream_options): + return Stream(self._request, stream_options) + + def _create_handshake_response(self, accept): + response = [] + + response.append('HTTP/1.1 101 Switching Protocols\r\n') + + response.append(format_header( + common.UPGRADE_HEADER, common.WEBSOCKET_UPGRADE_TYPE)) + response.append(format_header( + common.CONNECTION_HEADER, common.UPGRADE_CONNECTION_TYPE)) + response.append(format_header( + common.SEC_WEBSOCKET_ACCEPT_HEADER, accept)) + if self._request.ws_protocol is not None: + response.append(format_header( + common.SEC_WEBSOCKET_PROTOCOL_HEADER, + self._request.ws_protocol)) + if (self._request.ws_extensions is not None and + len(self._request.ws_extensions) != 0): + response.append(format_header( + common.SEC_WEBSOCKET_EXTENSIONS_HEADER, + common.format_extensions(self._request.ws_extensions))) + response.append('\r\n') + + return ''.join(response) + + def _send_handshake(self, accept): + raw_response = self._create_handshake_response(accept) + self._request.connection.write(raw_response) + self._logger.debug('Sent server\'s opening handshake: %r', + raw_response) + + +# vi:sts=4 sw=4 et diff --git a/pyload/lib/mod_pywebsocket/handshake/hybi00.py b/pyload/lib/mod_pywebsocket/handshake/hybi00.py new file mode 100644 index 000000000..cc6f8dc43 --- /dev/null +++ b/pyload/lib/mod_pywebsocket/handshake/hybi00.py @@ -0,0 +1,242 @@ +# Copyright 2011, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. 
nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +"""This file provides the opening handshake processor for the WebSocket +protocol version HyBi 00. + +Specification: +http://tools.ietf.org/html/draft-ietf-hybi-thewebsocketprotocol-00 +""" + + +# Note: request.connection.write/read are used in this module, even though +# mod_python document says that they should be used only in connection +# handlers. Unfortunately, we have no other options. For example, +# request.write/read are not suitable because they don't allow direct raw bytes +# writing/reading. + + +import logging +import re +import struct + +from mod_pywebsocket import common +from mod_pywebsocket.stream import StreamHixie75 +from mod_pywebsocket import util +from mod_pywebsocket.handshake._base import HandshakeException +from mod_pywebsocket.handshake._base import build_location +from mod_pywebsocket.handshake._base import check_header_lines +from mod_pywebsocket.handshake._base import format_header +from mod_pywebsocket.handshake._base import get_mandatory_header +from mod_pywebsocket.handshake._base import validate_subprotocol + + +_MANDATORY_HEADERS = [ + # key, expected value or None + [common.UPGRADE_HEADER, common.WEBSOCKET_UPGRADE_TYPE_HIXIE75], + [common.CONNECTION_HEADER, common.UPGRADE_CONNECTION_TYPE], +] + + +class Handshaker(object): + """Opening handshake processor for the WebSocket protocol version HyBi 00. + """ + + def __init__(self, request, dispatcher): + """Construct an instance. + + Args: + request: mod_python request. + dispatcher: Dispatcher (dispatch.Dispatcher). + + Handshaker will add attributes such as ws_resource in performing + handshake. + """ + + self._logger = util.get_class_logger(self) + + self._request = request + self._dispatcher = dispatcher + + def do_handshake(self): + """Perform WebSocket Handshake. + + On _request, we set + ws_resource, ws_protocol, ws_location, ws_origin, ws_challenge, + ws_challenge_md5: WebSocket handshake information. + ws_stream: Frame generation/parsing class. + ws_version: Protocol version. + + Raises: + HandshakeException: when any error happened in parsing the opening + handshake request. + """ + + # 5.1 Reading the client's opening handshake. + # dispatcher sets it in self._request. 
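For reference, the challenge/response that _set_challenge_response and _get_challenge below assemble can be reproduced standalone. A minimal sketch (Python 3, standard library only, not part of the patch), using the worked example from the draft itself; the printed digest is the draft's published value:

    import hashlib
    import re
    import struct

    def part(key_value):
        # Steps 5.2 4-7: digits of the key as a base-ten integer,
        # divided by the number of U+0020 spaces.
        return int(re.sub(r'\D', '', key_value)) // key_value.count(' ')

    key1 = '4 @1  46546xW%0l 1 5'  # Sec-WebSocket-Key1 (draft example)
    key2 = '12998 5 Y3 1  .P00'    # Sec-WebSocket-Key2 (draft example)
    key3 = b'^n:ds[4U'             # the 8 raw bytes following the headers

    challenge = (struct.pack('!I', part(key1)) +
                 struct.pack('!I', part(key2)) +
                 key3)
    print(hashlib.md5(challenge).digest())  # b"8jKS'y:G*Co,Wxa-"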
+ check_header_lines(self._request, _MANDATORY_HEADERS) + self._set_resource() + self._set_subprotocol() + self._set_location() + self._set_origin() + self._set_challenge_response() + self._set_protocol_version() + + self._dispatcher.do_extra_handshake(self._request) + + self._send_handshake() + + def _set_resource(self): + self._request.ws_resource = self._request.uri + + def _set_subprotocol(self): + # |Sec-WebSocket-Protocol| + subprotocol = self._request.headers_in.get( + common.SEC_WEBSOCKET_PROTOCOL_HEADER) + if subprotocol is not None: + validate_subprotocol(subprotocol, hixie=True) + self._request.ws_protocol = subprotocol + + def _set_location(self): + # |Host| + host = self._request.headers_in.get(common.HOST_HEADER) + if host is not None: + self._request.ws_location = build_location(self._request) + # TODO(ukai): check host is this host. + + def _set_origin(self): + # |Origin| + origin = self._request.headers_in.get(common.ORIGIN_HEADER) + if origin is not None: + self._request.ws_origin = origin + + def _set_protocol_version(self): + # |Sec-WebSocket-Draft| + draft = self._request.headers_in.get(common.SEC_WEBSOCKET_DRAFT_HEADER) + if draft is not None and draft != '0': + raise HandshakeException('Illegal value for %s: %s' % + (common.SEC_WEBSOCKET_DRAFT_HEADER, + draft)) + + self._logger.debug('Protocol version is HyBi 00') + self._request.ws_version = common.VERSION_HYBI00 + self._request.ws_stream = StreamHixie75(self._request, True) + + def _set_challenge_response(self): + # 5.2 4-8. + self._request.ws_challenge = self._get_challenge() + # 5.2 9. let /response/ be the MD5 fingerprint of /challenge/ + self._request.ws_challenge_md5 = util.md5_hash( + self._request.ws_challenge).digest() + self._logger.debug( + 'Challenge: %r (%s)', + self._request.ws_challenge, + util.hexify(self._request.ws_challenge)) + self._logger.debug( + 'Challenge response: %r (%s)', + self._request.ws_challenge_md5, + util.hexify(self._request.ws_challenge_md5)) + + def _get_key_value(self, key_field): + key_value = get_mandatory_header(self._request, key_field) + + self._logger.debug('%s: %r', key_field, key_value) + + # 5.2 4. let /key-number_n/ be the digits (characters in the range + # U+0030 DIGIT ZERO (0) to U+0039 DIGIT NINE (9)) in /key_n/, + # interpreted as a base ten integer, ignoring all other characters + # in /key_n/. + try: + key_number = int(re.sub("\\D", "", key_value)) + except: + raise HandshakeException('%s field contains no digit' % key_field) + # 5.2 5. let /spaces_n/ be the number of U+0020 SPACE characters + # in /key_n/. + spaces = re.subn(" ", "", key_value)[1] + if spaces == 0: + raise HandshakeException('%s field contains no space' % key_field) + + self._logger.debug( + '%s: Key-number is %d and number of spaces is %d', + key_field, key_number, spaces) + + # 5.2 6. if /key-number_n/ is not an integral multiple of /spaces_n/ + # then abort the WebSocket connection. + if key_number % spaces != 0: + raise HandshakeException( + '%s: Key-number (%d) is not an integral multiple of spaces ' + '(%d)' % (key_field, key_number, spaces)) + # 5.2 7. let /part_n/ be /key-number_n/ divided by /spaces_n/. + part = key_number / spaces + self._logger.debug('%s: Part is %d', key_field, part) + return part + + def _get_challenge(self): + # 5.2 4-7. + key1 = self._get_key_value(common.SEC_WEBSOCKET_KEY1_HEADER) + key2 = self._get_key_value(common.SEC_WEBSOCKET_KEY2_HEADER) + # 5.2 8. 
let /challenge/ be the concatenation of /part_1/, + challenge = '' + challenge += struct.pack('!I', key1) # network byteorder int + challenge += struct.pack('!I', key2) # network byteorder int + challenge += self._request.connection.read(8) + return challenge + + def _send_handshake(self): + response = [] + + # 5.2 10. send the following line. + response.append('HTTP/1.1 101 WebSocket Protocol Handshake\r\n') + + # 5.2 11. send the following fields to the client. + response.append(format_header( + common.UPGRADE_HEADER, common.WEBSOCKET_UPGRADE_TYPE_HIXIE75)) + response.append(format_header( + common.CONNECTION_HEADER, common.UPGRADE_CONNECTION_TYPE)) + response.append(format_header( + common.SEC_WEBSOCKET_LOCATION_HEADER, self._request.ws_location)) + response.append(format_header( + common.SEC_WEBSOCKET_ORIGIN_HEADER, self._request.ws_origin)) + if self._request.ws_protocol: + response.append(format_header( + common.SEC_WEBSOCKET_PROTOCOL_HEADER, + self._request.ws_protocol)) + # 5.2 12. send two bytes 0x0D 0x0A. + response.append('\r\n') + # 5.2 13. send /response/ + response.append(self._request.ws_challenge_md5) + + raw_response = ''.join(response) + self._request.connection.write(raw_response) + self._logger.debug('Sent server\'s opening handshake: %r', + raw_response) + + +# vi:sts=4 sw=4 et diff --git a/pyload/lib/mod_pywebsocket/headerparserhandler.py b/pyload/lib/mod_pywebsocket/headerparserhandler.py new file mode 100644 index 000000000..2cc62de04 --- /dev/null +++ b/pyload/lib/mod_pywebsocket/headerparserhandler.py @@ -0,0 +1,244 @@ +# Copyright 2011, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +"""PythonHeaderParserHandler for mod_pywebsocket. + +Apache HTTP Server and mod_python must be configured such that this +function is called to handle WebSocket request. 
+""" + + +import logging + +from mod_python import apache + +from mod_pywebsocket import common +from mod_pywebsocket import dispatch +from mod_pywebsocket import handshake +from mod_pywebsocket import util + + +# PythonOption to specify the handler root directory. +_PYOPT_HANDLER_ROOT = 'mod_pywebsocket.handler_root' + +# PythonOption to specify the handler scan directory. +# This must be a directory under the root directory. +# The default is the root directory. +_PYOPT_HANDLER_SCAN = 'mod_pywebsocket.handler_scan' + +# PythonOption to allow handlers whose canonical path is +# not under the root directory. It's disallowed by default. +# Set this option with value of 'yes' to allow. +_PYOPT_ALLOW_HANDLERS_OUTSIDE_ROOT = ( + 'mod_pywebsocket.allow_handlers_outside_root_dir') +# Map from values to their meanings. 'Yes' and 'No' are allowed just for +# compatibility. +_PYOPT_ALLOW_HANDLERS_OUTSIDE_ROOT_DEFINITION = { + 'off': False, 'no': False, 'on': True, 'yes': True} + +# (Obsolete option. Ignored.) +# PythonOption to specify to allow handshake defined in Hixie 75 version +# protocol. The default is None (Off) +_PYOPT_ALLOW_DRAFT75 = 'mod_pywebsocket.allow_draft75' +# Map from values to their meanings. +_PYOPT_ALLOW_DRAFT75_DEFINITION = {'off': False, 'on': True} + + +class ApacheLogHandler(logging.Handler): + """Wrapper logging.Handler to emit log message to apache's error.log.""" + + _LEVELS = { + logging.DEBUG: apache.APLOG_DEBUG, + logging.INFO: apache.APLOG_INFO, + logging.WARNING: apache.APLOG_WARNING, + logging.ERROR: apache.APLOG_ERR, + logging.CRITICAL: apache.APLOG_CRIT, + } + + def __init__(self, request=None): + logging.Handler.__init__(self) + self._log_error = apache.log_error + if request is not None: + self._log_error = request.log_error + + # Time and level will be printed by Apache. + self._formatter = logging.Formatter('%(name)s: %(message)s') + + def emit(self, record): + apache_level = apache.APLOG_DEBUG + if record.levelno in ApacheLogHandler._LEVELS: + apache_level = ApacheLogHandler._LEVELS[record.levelno] + + msg = self._formatter.format(record) + + # "server" parameter must be passed to have "level" parameter work. + # If only "level" parameter is passed, nothing shows up on Apache's + # log. However, at this point, we cannot get the server object of the + # virtual host which will process WebSocket requests. The only server + # object we can get here is apache.main_server. But Wherever (server + # configuration context or virtual host context) we put + # PythonHeaderParserHandler directive, apache.main_server just points + # the main server instance (not any of virtual server instance). Then, + # Apache follows LogLevel directive in the server configuration context + # to filter logs. So, we need to specify LogLevel in the server + # configuration context. Even if we specify "LogLevel debug" in the + # virtual host context which actually handles WebSocket connections, + # DEBUG level logs never show up unless "LogLevel debug" is specified + # in the server configuration context. + # + # TODO(tyoshino): Provide logging methods on request object. When + # request is mp_request object (when used together with Apache), the + # methods call request.log_error indirectly. When request is + # _StandaloneRequest, the methods call Python's logging facility which + # we create in standalone.py. 
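For orientation, the PythonOption names above are consumed from the Apache configuration that wires mod_python to this module. A hypothetical httpd.conf fragment along the lines of the module's documented setup (all paths are placeholders); note that, per the comment above, LogLevel has to sit in the server configuration context for DEBUG messages to survive filtering:

    LoadModule python_module modules/mod_python.so
    LogLevel debug
    <IfModule python_module>
        PythonPath "sys.path + ['/opt/pywebsocket/src']"
        PythonOption mod_pywebsocket.handler_root /opt/websocket/handlers
        PythonHeaderParserHandler mod_pywebsocket.headerparserhandler
    </IfModule>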
+ self._log_error(msg, apache_level, apache.main_server) + + +def _configure_logging(): + logger = logging.getLogger() + # Logs are filtered by Apache based on LogLevel directive in Apache + # configuration file. We must just pass logs for all levels to + # ApacheLogHandler. + logger.setLevel(logging.DEBUG) + logger.addHandler(ApacheLogHandler()) + + +_configure_logging() + +_LOGGER = logging.getLogger(__name__) + + +def _parse_option(name, value, definition): + if value is None: + return False + + meaning = definition.get(value.lower()) + if meaning is None: + raise Exception('Invalid value for PythonOption %s: %r' % + (name, value)) + return meaning + + +def _create_dispatcher(): + _LOGGER.info('Initializing Dispatcher') + + options = apache.main_server.get_options() + + handler_root = options.get(_PYOPT_HANDLER_ROOT, None) + if not handler_root: + raise Exception('PythonOption %s is not defined' % _PYOPT_HANDLER_ROOT, + apache.APLOG_ERR) + + handler_scan = options.get(_PYOPT_HANDLER_SCAN, handler_root) + + allow_handlers_outside_root = _parse_option( + _PYOPT_ALLOW_HANDLERS_OUTSIDE_ROOT, + options.get(_PYOPT_ALLOW_HANDLERS_OUTSIDE_ROOT), + _PYOPT_ALLOW_HANDLERS_OUTSIDE_ROOT_DEFINITION) + + dispatcher = dispatch.Dispatcher( + handler_root, handler_scan, allow_handlers_outside_root) + + for warning in dispatcher.source_warnings(): + apache.log_error('mod_pywebsocket: %s' % warning, apache.APLOG_WARNING) + + return dispatcher + + +# Initialize +_dispatcher = _create_dispatcher() + + +def headerparserhandler(request): + """Handle request. + + Args: + request: mod_python request. + + This function is named headerparserhandler because it is the default + name for a PythonHeaderParserHandler. + """ + + handshake_is_done = False + try: + # Fallback to default http handler for request paths for which + # we don't have request handlers. + if not _dispatcher.get_handler_suite(request.uri): + request.log_error('No handler for resource: %r' % request.uri, + apache.APLOG_INFO) + request.log_error('Fallback to Apache', apache.APLOG_INFO) + return apache.DECLINED + except dispatch.DispatchException, e: + request.log_error('mod_pywebsocket: %s' % e, apache.APLOG_INFO) + if not handshake_is_done: + return e.status + + try: + allow_draft75 = _parse_option( + _PYOPT_ALLOW_DRAFT75, + apache.main_server.get_options().get(_PYOPT_ALLOW_DRAFT75), + _PYOPT_ALLOW_DRAFT75_DEFINITION) + + try: + handshake.do_handshake( + request, _dispatcher, allowDraft75=allow_draft75) + except handshake.VersionException, e: + request.log_error('mod_pywebsocket: %s' % e, apache.APLOG_INFO) + request.err_headers_out.add(common.SEC_WEBSOCKET_VERSION_HEADER, + e.supported_versions) + return apache.HTTP_BAD_REQUEST + except handshake.HandshakeException, e: + # Handshake for ws/wss failed. + # Send http response with error status. + request.log_error('mod_pywebsocket: %s' % e, apache.APLOG_INFO) + return e.status + + handshake_is_done = True + request._dispatcher = _dispatcher + _dispatcher.transfer_data(request) + except handshake.AbortedByUserException, e: + request.log_error('mod_pywebsocket: %s' % e, apache.APLOG_INFO) + except Exception, e: + # DispatchException can also be thrown if something is wrong in + # pywebsocket code. It's caught here, then. + + request.log_error('mod_pywebsocket: %s\n%s' % + (e, util.get_stack_trace()), + apache.APLOG_ERR) + # Unknown exceptions before handshake mean Apache must handle its + # request with another handler. 
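The dispatcher created above maps a resource path to a handler file under handler_root. A handler is ordinary Python exporting two well-known entry points; a minimal echo sketch (file name and path hypothetical, following the _wsh.py naming convention, using the msgutil helpers defined later in this patch):

    # /opt/websocket/handlers/echo_wsh.py -- served as ws://host/echo
    from mod_pywebsocket import msgutil

    def web_socket_do_extra_handshake(request):
        # Accept the handshake as-is; origin/subprotocol checks go here.
        pass

    def web_socket_transfer_data(request):
        while True:
            message = msgutil.receive_message(request)
            if message is None:
                return
            msgutil.send_message(request, message)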
+ if not handshake_is_done: + return apache.DECLINED + # Set assbackwards to suppress response header generation by Apache. + request.assbackwards = 1 + return apache.DONE # Return DONE such that no other handlers are invoked. + + +# vi:sts=4 sw=4 et diff --git a/pyload/lib/mod_pywebsocket/http_header_util.py b/pyload/lib/mod_pywebsocket/http_header_util.py new file mode 100644 index 000000000..b77465393 --- /dev/null +++ b/pyload/lib/mod_pywebsocket/http_header_util.py @@ -0,0 +1,263 @@ +# Copyright 2011, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +"""Utilities for parsing and formatting headers that follow the grammar defined +in HTTP RFC http://www.ietf.org/rfc/rfc2616.txt. +""" + + +import urlparse + + +_SEPARATORS = '()<>@,;:\\"/[]?={} \t' + + +def _is_char(c): + """Returns true iff c is in CHAR as specified in HTTP RFC.""" + + return ord(c) <= 127 + + +def _is_ctl(c): + """Returns true iff c is in CTL as specified in HTTP RFC.""" + + return ord(c) <= 31 or ord(c) == 127 + + +class ParsingState(object): + + def __init__(self, data): + self.data = data + self.head = 0 + + +def peek(state, pos=0): + """Peeks the character at pos from the head of data.""" + + if state.head + pos >= len(state.data): + return None + + return state.data[state.head + pos] + + +def consume(state, amount=1): + """Consumes specified amount of bytes from the head and returns the + consumed bytes. If there's not enough bytes to consume, returns None. + """ + + if state.head + amount > len(state.data): + return None + + result = state.data[state.head:state.head + amount] + state.head = state.head + amount + return result + + +def consume_string(state, expected): + """Given a parsing state and a expected string, consumes the string from + the head. Returns True if consumed successfully. Otherwise, returns + False. + """ + + pos = 0 + + for c in expected: + if c != peek(state, pos): + return False + pos += 1 + + consume(state, pos) + return True + + +def consume_lws(state): + """Consumes a LWS from the head. Returns True if any LWS is consumed. 
+ Otherwise, returns False. + + LWS = [CRLF] 1*( SP | HT ) + """ + + original_head = state.head + + consume_string(state, '\r\n') + + pos = 0 + + while True: + c = peek(state, pos) + if c == ' ' or c == '\t': + pos += 1 + else: + if pos == 0: + state.head = original_head + return False + else: + consume(state, pos) + return True + + +def consume_lwses(state): + """Consumes *LWS from the head.""" + + while consume_lws(state): + pass + + +def consume_token(state): + """Consumes a token from the head. Returns the token or None if no token + was found. + """ + + pos = 0 + + while True: + c = peek(state, pos) + if c is None or c in _SEPARATORS or _is_ctl(c) or not _is_char(c): + if pos == 0: + return None + + return consume(state, pos) + else: + pos += 1 + + +def consume_token_or_quoted_string(state): + """Consumes a token or a quoted-string, and returns the token or unquoted + string. If no token or quoted-string was found, returns None. + """ + + original_head = state.head + + if not consume_string(state, '"'): + return consume_token(state) + + result = [] + + expect_quoted_pair = False + + while True: + if not expect_quoted_pair and consume_lws(state): + result.append(' ') + continue + + c = consume(state) + if c is None: + # quoted-string is not enclosed with double quotation + state.head = original_head + return None + elif expect_quoted_pair: + expect_quoted_pair = False + if _is_char(c): + result.append(c) + else: + # Non CHAR character found in quoted-pair + state.head = original_head + return None + elif c == '\\': + expect_quoted_pair = True + elif c == '"': + return ''.join(result) + elif _is_ctl(c): + # Invalid character %r found in qdtext + state.head = original_head + return None + else: + result.append(c) + + +def quote_if_necessary(s): + """Quotes arbitrary string into quoted-string.""" + + quote = False + if s == '': + return '""' + + result = [] + for c in s: + if c == '"' or c in _SEPARATORS or _is_ctl(c) or not _is_char(c): + quote = True + + if c == '"' or _is_ctl(c): + result.append('\\' + c) + else: + result.append(c) + + if quote: + return '"' + ''.join(result) + '"' + else: + return ''.join(result) + + +def parse_uri(uri): + """Parse absolute URI then return host, port and resource.""" + + parsed = urlparse.urlsplit(uri) + if parsed.scheme != 'wss' and parsed.scheme != 'ws': + # |uri| must be a relative URI. + # TODO(toyoshim): Should validate |uri|. + return None, None, uri + + if parsed.hostname is None: + return None, None, None + + port = None + try: + port = parsed.port + except ValueError, e: + # port property cause ValueError on invalid null port description like + # 'ws://host:/path'. + return None, None, None + + if port is None: + if parsed.scheme == 'ws': + port = 80 + else: + port = 443 + + path = parsed.path + if not path: + path += '/' + if parsed.query: + path += '?' + parsed.query + if parsed.fragment: + path += '#' + parsed.fragment + + return parsed.hostname, port, path + + +try: + urlparse.uses_netloc.index('ws') +except ValueError, e: + # urlparse in Python2.5.1 doesn't have 'ws' and 'wss' entries. + urlparse.uses_netloc.append('ws') + urlparse.uses_netloc.append('wss') + + +# vi:sts=4 sw=4 et diff --git a/pyload/lib/mod_pywebsocket/memorizingfile.py b/pyload/lib/mod_pywebsocket/memorizingfile.py new file mode 100644 index 000000000..4d4cd9585 --- /dev/null +++ b/pyload/lib/mod_pywebsocket/memorizingfile.py @@ -0,0 +1,99 @@ +#!/usr/bin/env python +# +# Copyright 2011, Google Inc. +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +"""Memorizing file. + +A memorizing file wraps a file and memorizes lines read by readline. +""" + + +import sys + + +class MemorizingFile(object): + """MemorizingFile wraps a file and memorizes lines read by readline. + + Note that data read by other methods are not memorized. This behavior + is good enough for memorizing lines SimpleHTTPServer reads before + the control reaches WebSocketRequestHandler. + """ + + def __init__(self, file_, max_memorized_lines=sys.maxint): + """Construct an instance. + + Args: + file_: the file object to wrap. + max_memorized_lines: the maximum number of lines to memorize. + Only the first max_memorized_lines are memorized. + Default: sys.maxint. + """ + + self._file = file_ + self._memorized_lines = [] + self._max_memorized_lines = max_memorized_lines + self._buffered = False + self._buffered_line = None + + def __getattribute__(self, name): + if name in ('_file', '_memorized_lines', '_max_memorized_lines', + '_buffered', '_buffered_line', 'readline', + 'get_memorized_lines'): + return object.__getattribute__(self, name) + return self._file.__getattribute__(name) + + def readline(self, size=-1): + """Override file.readline and memorize the line read. + + Note that even if size is specified and smaller than actual size, + the whole line will be read out from underlying file object by + subsequent readline calls. 
+ """ + + if self._buffered: + line = self._buffered_line + self._buffered = False + else: + line = self._file.readline() + if line and len(self._memorized_lines) < self._max_memorized_lines: + self._memorized_lines.append(line) + if size >= 0 and size < len(line): + self._buffered = True + self._buffered_line = line[size:] + return line[:size] + return line + + def get_memorized_lines(self): + """Get lines memorized so far.""" + return self._memorized_lines + + +# vi:sts=4 sw=4 et diff --git a/pyload/lib/mod_pywebsocket/msgutil.py b/pyload/lib/mod_pywebsocket/msgutil.py new file mode 100644 index 000000000..4c1a0114b --- /dev/null +++ b/pyload/lib/mod_pywebsocket/msgutil.py @@ -0,0 +1,219 @@ +# Copyright 2011, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +"""Message related utilities. + +Note: request.connection.write/read are used in this module, even though +mod_python document says that they should be used only in connection +handlers. Unfortunately, we have no other options. For example, +request.write/read are not suitable because they don't allow direct raw +bytes writing/reading. +""" + + +import Queue +import threading + + +# Export Exception symbols from msgutil for backward compatibility +from mod_pywebsocket._stream_base import ConnectionTerminatedException +from mod_pywebsocket._stream_base import InvalidFrameException +from mod_pywebsocket._stream_base import BadOperationException +from mod_pywebsocket._stream_base import UnsupportedFrameException + + +# An API for handler to send/receive WebSocket messages. +def close_connection(request): + """Close connection. + + Args: + request: mod_python request. + """ + request.ws_stream.close_connection() + + +def send_message(request, payload_data, end=True, binary=False): + """Send a message (or part of a message). + + Args: + request: mod_python request. + payload_data: unicode text or str binary to send. + end: True to terminate a message. + False to send payload_data as part of a message that is to be + terminated by next or later send_message call with end=True. 
+ binary: send payload_data as binary frame(s). + Raises: + BadOperationException: when server already terminated. + """ + request.ws_stream.send_message(payload_data, end, binary) + + +def receive_message(request): + """Receive a WebSocket frame and return its payload as a text in + unicode or a binary in str. + + Args: + request: mod_python request. + Raises: + InvalidFrameException: when client send invalid frame. + UnsupportedFrameException: when client send unsupported frame e.g. some + of reserved bit is set but no extension can + recognize it. + InvalidUTF8Exception: when client send a text frame containing any + invalid UTF-8 string. + ConnectionTerminatedException: when the connection is closed + unexpectedly. + BadOperationException: when client already terminated. + """ + return request.ws_stream.receive_message() + + +def send_ping(request, body=''): + request.ws_stream.send_ping(body) + + +class MessageReceiver(threading.Thread): + """This class receives messages from the client. + + This class provides three ways to receive messages: blocking, + non-blocking, and via callback. Callback has the highest precedence. + + Note: This class should not be used with the standalone server for wss + because pyOpenSSL used by the server raises a fatal error if the socket + is accessed from multiple threads. + """ + + def __init__(self, request, onmessage=None): + """Construct an instance. + + Args: + request: mod_python request. + onmessage: a function to be called when a message is received. + May be None. If not None, the function is called on + another thread. In that case, MessageReceiver.receive + and MessageReceiver.receive_nowait are useless + because they will never return any messages. + """ + + threading.Thread.__init__(self) + self._request = request + self._queue = Queue.Queue() + self._onmessage = onmessage + self._stop_requested = False + self.setDaemon(True) + self.start() + + def run(self): + try: + while not self._stop_requested: + message = receive_message(self._request) + if self._onmessage: + self._onmessage(message) + else: + self._queue.put(message) + finally: + close_connection(self._request) + + def receive(self): + """ Receive a message from the channel, blocking. + + Returns: + message as a unicode string. + """ + return self._queue.get() + + def receive_nowait(self): + """ Receive a message from the channel, non-blocking. + + Returns: + message as a unicode string if available. None otherwise. + """ + try: + message = self._queue.get_nowait() + except Queue.Empty: + message = None + return message + + def stop(self): + """Request to stop this instance. + + The instance will be stopped after receiving the next message. + This method may not be very useful, but there is no clean way + in Python to forcefully stop a running thread. + """ + self._stop_requested = True + + +class MessageSender(threading.Thread): + """This class sends messages to the client. + + This class provides both synchronous and asynchronous ways to send + messages. + + Note: This class should not be used with the standalone server for wss + because pyOpenSSL used by the server raises a fatal error if the socket + is accessed from multiple threads. + """ + + def __init__(self, request): + """Construct an instance. + + Args: + request: mod_python request. 
+ """ + threading.Thread.__init__(self) + self._request = request + self._queue = Queue.Queue() + self.setDaemon(True) + self.start() + + def run(self): + while True: + message, condition = self._queue.get() + condition.acquire() + send_message(self._request, message) + condition.notify() + condition.release() + + def send(self, message): + """Send a message, blocking.""" + + condition = threading.Condition() + condition.acquire() + self._queue.put((message, condition)) + condition.wait() + + def send_nowait(self, message): + """Send a message, non-blocking.""" + + self._queue.put((message, threading.Condition())) + + +# vi:sts=4 sw=4 et diff --git a/pyload/lib/mod_pywebsocket/mux.py b/pyload/lib/mod_pywebsocket/mux.py new file mode 100644 index 000000000..f0bdd2461 --- /dev/null +++ b/pyload/lib/mod_pywebsocket/mux.py @@ -0,0 +1,1636 @@ +# Copyright 2012, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +"""This file provides classes and helper functions for multiplexing extension. 
+ +Specification: +http://tools.ietf.org/html/draft-ietf-hybi-websocket-multiplexing-06 +""" + + +import collections +import copy +import email +import email.parser +import logging +import math +import struct +import threading +import traceback + +from mod_pywebsocket import common +from mod_pywebsocket import handshake +from mod_pywebsocket import util +from mod_pywebsocket._stream_base import BadOperationException +from mod_pywebsocket._stream_base import ConnectionTerminatedException +from mod_pywebsocket._stream_hybi import Frame +from mod_pywebsocket._stream_hybi import Stream +from mod_pywebsocket._stream_hybi import StreamOptions +from mod_pywebsocket._stream_hybi import create_binary_frame +from mod_pywebsocket._stream_hybi import create_closing_handshake_body +from mod_pywebsocket._stream_hybi import create_header +from mod_pywebsocket._stream_hybi import create_length_header +from mod_pywebsocket._stream_hybi import parse_frame +from mod_pywebsocket.handshake import hybi + + +_CONTROL_CHANNEL_ID = 0 +_DEFAULT_CHANNEL_ID = 1 + +_MUX_OPCODE_ADD_CHANNEL_REQUEST = 0 +_MUX_OPCODE_ADD_CHANNEL_RESPONSE = 1 +_MUX_OPCODE_FLOW_CONTROL = 2 +_MUX_OPCODE_DROP_CHANNEL = 3 +_MUX_OPCODE_NEW_CHANNEL_SLOT = 4 + +_MAX_CHANNEL_ID = 2 ** 29 - 1 + +_INITIAL_NUMBER_OF_CHANNEL_SLOTS = 64 +_INITIAL_QUOTA_FOR_CLIENT = 8 * 1024 + +_HANDSHAKE_ENCODING_IDENTITY = 0 +_HANDSHAKE_ENCODING_DELTA = 1 + +# We need only these status code for now. +_HTTP_BAD_RESPONSE_MESSAGES = { + common.HTTP_STATUS_BAD_REQUEST: 'Bad Request', +} + +# DropChannel reason code +# TODO(bashi): Define all reason code defined in -05 draft. +_DROP_CODE_NORMAL_CLOSURE = 1000 + +_DROP_CODE_INVALID_ENCAPSULATING_MESSAGE = 2001 +_DROP_CODE_CHANNEL_ID_TRUNCATED = 2002 +_DROP_CODE_ENCAPSULATED_FRAME_IS_TRUNCATED = 2003 +_DROP_CODE_UNKNOWN_MUX_OPCODE = 2004 +_DROP_CODE_INVALID_MUX_CONTROL_BLOCK = 2005 +_DROP_CODE_CHANNEL_ALREADY_EXISTS = 2006 +_DROP_CODE_NEW_CHANNEL_SLOT_VIOLATION = 2007 + +_DROP_CODE_UNKNOWN_REQUEST_ENCODING = 3002 +_DROP_CODE_SEND_QUOTA_VIOLATION = 3005 +_DROP_CODE_ACKNOWLEDGED = 3008 + + +class MuxUnexpectedException(Exception): + """Exception in handling multiplexing extension.""" + pass + + +# Temporary +class MuxNotImplementedException(Exception): + """Raised when a flow enters unimplemented code path.""" + pass + + +class LogicalConnectionClosedException(Exception): + """Raised when logical connection is gracefully closed.""" + pass + + +class PhysicalConnectionError(Exception): + """Raised when there is a physical connection error.""" + def __init__(self, drop_code, message=''): + super(PhysicalConnectionError, self).__init__( + 'code=%d, message=%r' % (drop_code, message)) + self.drop_code = drop_code + self.message = message + + +class LogicalChannelError(Exception): + """Raised when there is a logical channel error.""" + def __init__(self, channel_id, drop_code, message=''): + super(LogicalChannelError, self).__init__( + 'channel_id=%d, code=%d, message=%r' % ( + channel_id, drop_code, message)) + self.channel_id = channel_id + self.drop_code = drop_code + self.message = message + + +def _encode_channel_id(channel_id): + if channel_id < 0: + raise ValueError('Channel id %d must not be negative' % channel_id) + + if channel_id < 2 ** 7: + return chr(channel_id) + if channel_id < 2 ** 14: + return struct.pack('!H', 0x8000 + channel_id) + if channel_id < 2 ** 21: + first = chr(0xc0 + (channel_id >> 16)) + return first + struct.pack('!H', channel_id & 0xffff) + if channel_id < 2 ** 29: + return struct.pack('!L', 
0xe0000000 + channel_id) + + raise ValueError('Channel id %d is too large' % channel_id) + + +def _encode_number(number): + return create_length_header(number, False) + + +def _create_add_channel_response(channel_id, encoded_handshake, + encoding=0, rejected=False, + outer_frame_mask=False): + if encoding != 0 and encoding != 1: + raise ValueError('Invalid encoding %d' % encoding) + + first_byte = ((_MUX_OPCODE_ADD_CHANNEL_RESPONSE << 5) | + (rejected << 4) | encoding) + block = (chr(first_byte) + + _encode_channel_id(channel_id) + + _encode_number(len(encoded_handshake)) + + encoded_handshake) + payload = _encode_channel_id(_CONTROL_CHANNEL_ID) + block + return create_binary_frame(payload, mask=outer_frame_mask) + + +def _create_drop_channel(channel_id, code=None, message='', + outer_frame_mask=False): + if len(message) > 0 and code is None: + raise ValueError('Code must be specified if message is specified') + + first_byte = _MUX_OPCODE_DROP_CHANNEL << 5 + block = chr(first_byte) + _encode_channel_id(channel_id) + if code is None: + block += _encode_number(0) # Reason size + else: + reason = struct.pack('!H', code) + message + reason_size = _encode_number(len(reason)) + block += reason_size + reason + + payload = _encode_channel_id(_CONTROL_CHANNEL_ID) + block + return create_binary_frame(payload, mask=outer_frame_mask) + + +def _create_flow_control(channel_id, replenished_quota, + outer_frame_mask=False): + first_byte = _MUX_OPCODE_FLOW_CONTROL << 5 + block = (chr(first_byte) + + _encode_channel_id(channel_id) + + _encode_number(replenished_quota)) + payload = _encode_channel_id(_CONTROL_CHANNEL_ID) + block + return create_binary_frame(payload, mask=outer_frame_mask) + + +def _create_new_channel_slot(slots, send_quota, outer_frame_mask=False): + if slots < 0 or send_quota < 0: + raise ValueError('slots and send_quota must be non-negative.') + first_byte = _MUX_OPCODE_NEW_CHANNEL_SLOT << 5 + block = (chr(first_byte) + + _encode_number(slots) + + _encode_number(send_quota)) + payload = _encode_channel_id(_CONTROL_CHANNEL_ID) + block + return create_binary_frame(payload, mask=outer_frame_mask) + + +def _create_fallback_new_channel_slot(outer_frame_mask=False): + first_byte = (_MUX_OPCODE_NEW_CHANNEL_SLOT << 5) | 1 # Set the F flag + block = (chr(first_byte) + _encode_number(0) + _encode_number(0)) + payload = _encode_channel_id(_CONTROL_CHANNEL_ID) + block + return create_binary_frame(payload, mask=outer_frame_mask) + + +def _parse_request_text(request_text): + request_line, header_lines = request_text.split('\r\n', 1) + + words = request_line.split(' ') + if len(words) != 3: + raise ValueError('Bad Request-Line syntax %r' % request_line) + [command, path, version] = words + if version != 'HTTP/1.1': + raise ValueError('Bad request version %r' % version) + + # email.parser.Parser() parses RFC 2822 (RFC 822) style headers. + # RFC 6455 refers RFC 2616 for handshake parsing, and RFC 2616 refers + # RFC 822. + headers = email.parser.Parser().parsestr(header_lines) + return command, path, version, headers + + +class _ControlBlock(object): + """A structure that holds parsing result of multiplexing control block. + Control block specific attributes will be added by _MuxFramePayloadParser. + (e.g. 
encoded_handshake will be added for AddChannelRequest and + AddChannelResponse) + """ + + def __init__(self, opcode): + self.opcode = opcode + + +class _MuxFramePayloadParser(object): + """A class that parses multiplexed frame payload.""" + + def __init__(self, payload): + self._data = payload + self._read_position = 0 + self._logger = util.get_class_logger(self) + + def read_channel_id(self): + """Reads channel id. + + Raises: + ValueError: when the payload doesn't contain + valid channel id. + """ + + remaining_length = len(self._data) - self._read_position + pos = self._read_position + if remaining_length == 0: + raise ValueError('Invalid channel id format') + + channel_id = ord(self._data[pos]) + channel_id_length = 1 + if channel_id & 0xe0 == 0xe0: + if remaining_length < 4: + raise ValueError('Invalid channel id format') + channel_id = struct.unpack('!L', + self._data[pos:pos+4])[0] & 0x1fffffff + channel_id_length = 4 + elif channel_id & 0xc0 == 0xc0: + if remaining_length < 3: + raise ValueError('Invalid channel id format') + channel_id = (((channel_id & 0x1f) << 16) + + struct.unpack('!H', self._data[pos+1:pos+3])[0]) + channel_id_length = 3 + elif channel_id & 0x80 == 0x80: + if remaining_length < 2: + raise ValueError('Invalid channel id format') + channel_id = struct.unpack('!H', + self._data[pos:pos+2])[0] & 0x3fff + channel_id_length = 2 + self._read_position += channel_id_length + + return channel_id + + def read_inner_frame(self): + """Reads an inner frame. + + Raises: + PhysicalConnectionError: when the inner frame is invalid. + """ + + if len(self._data) == self._read_position: + raise PhysicalConnectionError( + _DROP_CODE_ENCAPSULATED_FRAME_IS_TRUNCATED) + + bits = ord(self._data[self._read_position]) + self._read_position += 1 + fin = (bits & 0x80) == 0x80 + rsv1 = (bits & 0x40) == 0x40 + rsv2 = (bits & 0x20) == 0x20 + rsv3 = (bits & 0x10) == 0x10 + opcode = bits & 0xf + payload = self.remaining_data() + # Consume rest of the message which is payload data of the original + # frame. 
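read_channel_id above undoes the variable-length encoding that _encode_channel_id produces: the leading bits of the first byte (0b0..., 0b10..., 0b110..., 0b1110...) select a 1-, 2-, 3- or 4-byte form. A standalone sketch of the encoder side (Python 3, sample ids hypothetical):

    import struct

    def encode_channel_id(cid):
        if cid < 2 ** 7:    # 1 byte, high bit clear
            return bytes([cid])
        if cid < 2 ** 14:   # 2 bytes, prefix 0b10
            return struct.pack('!H', 0x8000 + cid)
        if cid < 2 ** 21:   # 3 bytes, prefix 0b110
            return bytes([0xc0 + (cid >> 16)]) + struct.pack('!H', cid & 0xffff)
        if cid < 2 ** 29:   # 4 bytes, prefix 0b1110
            return struct.pack('!L', 0xe0000000 + cid)
        raise ValueError('channel id too large: %d' % cid)

    for cid in (1, 200, 20000, 2000000, 2 ** 29 - 1):
        print(cid, encode_channel_id(cid).hex())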
+ self._read_position = len(self._data) + return fin, rsv1, rsv2, rsv3, opcode, payload + + def _read_number(self): + if self._read_position + 1 > len(self._data): + raise PhysicalConnectionError( + _DROP_CODE_INVALID_MUX_CONTROL_BLOCK, + 'Cannot read the first byte of number field') + + number = ord(self._data[self._read_position]) + if number & 0x80 == 0x80: + raise PhysicalConnectionError( + _DROP_CODE_INVALID_MUX_CONTROL_BLOCK, + 'The most significant bit of the first byte of number should ' + 'be unset') + self._read_position += 1 + pos = self._read_position + if number == 127: + if pos + 8 > len(self._data): + raise PhysicalConnectionError( + _DROP_CODE_INVALID_MUX_CONTROL_BLOCK, + 'Invalid number field') + self._read_position += 8 + number = struct.unpack('!Q', self._data[pos:pos+8])[0] + if number > 0x7FFFFFFFFFFFFFFF: + raise PhysicalConnectionError( + _DROP_CODE_INVALID_MUX_CONTROL_BLOCK, + 'Encoded number >= 2^63') + if number <= 0xFFFF: + raise PhysicalConnectionError( + _DROP_CODE_INVALID_MUX_CONTROL_BLOCK, + '%d should not be encoded by 9 bytes encoding' % number) + return number + if number == 126: + if pos + 2 > len(self._data): + raise PhysicalConnectionError( + _DROP_CODE_INVALID_MUX_CONTROL_BLOCK, + 'Invalid number field') + self._read_position += 2 + number = struct.unpack('!H', self._data[pos:pos+2])[0] + if number <= 125: + raise PhysicalConnectionError( + _DROP_CODE_INVALID_MUX_CONTROL_BLOCK, + '%d should not be encoded by 3 bytes encoding' % number) + return number + + def _read_size_and_contents(self): + """Reads data that consists of followings: + - the size of the contents encoded the same way as payload length + of the WebSocket Protocol with 1 bit padding at the head. + - the contents. + """ + + size = self._read_number() + pos = self._read_position + if pos + size > len(self._data): + raise PhysicalConnectionError( + _DROP_CODE_INVALID_MUX_CONTROL_BLOCK, + 'Cannot read %d bytes data' % size) + + self._read_position += size + return self._data[pos:pos+size] + + def _read_add_channel_request(self, first_byte, control_block): + reserved = (first_byte >> 2) & 0x7 + if reserved != 0: + raise PhysicalConnectionError( + _DROP_CODE_INVALID_MUX_CONTROL_BLOCK, + 'Reserved bits must be unset') + + # Invalid encoding will be handled by MuxHandler. 
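_read_number above parses the same 1/3/9-byte length scheme as the WebSocket payload length (126 and 127 act as escape values), requires the first byte's MSB to be clear, and rejects non-minimal encodings. The inverse direction as a small sketch (Python 3, not part of the patch):

    import struct

    def encode_number(number):
        if number <= 125:                 # 1-byte form
            return bytes([number])
        if number <= 0xffff:              # 3-byte form: 126 then 16 bits
            return b'\x7e' + struct.pack('!H', number)
        if number <= 0x7fffffffffffffff:  # 9-byte form: 127 then 64 bits
            return b'\x7f' + struct.pack('!Q', number)
        raise ValueError('number too large: %d' % number)

    for n in (0, 125, 126, 65535, 65536):
        print(n, encode_number(n).hex())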
+ encoding = first_byte & 0x3 + try: + control_block.channel_id = self.read_channel_id() + except ValueError, e: + raise PhysicalConnectionError(_DROP_CODE_INVALID_MUX_CONTROL_BLOCK) + control_block.encoding = encoding + encoded_handshake = self._read_size_and_contents() + control_block.encoded_handshake = encoded_handshake + return control_block + + def _read_add_channel_response(self, first_byte, control_block): + reserved = (first_byte >> 2) & 0x3 + if reserved != 0: + raise PhysicalConnectionError( + _DROP_CODE_INVALID_MUX_CONTROL_BLOCK, + 'Reserved bits must be unset') + + control_block.accepted = (first_byte >> 4) & 1 + control_block.encoding = first_byte & 0x3 + try: + control_block.channel_id = self.read_channel_id() + except ValueError, e: + raise PhysicalConnectionError(_DROP_CODE_INVALID_MUX_CONTROL_BLOCK) + control_block.encoded_handshake = self._read_size_and_contents() + return control_block + + def _read_flow_control(self, first_byte, control_block): + reserved = first_byte & 0x1f + if reserved != 0: + raise PhysicalConnectionError( + _DROP_CODE_INVALID_MUX_CONTROL_BLOCK, + 'Reserved bits must be unset') + + try: + control_block.channel_id = self.read_channel_id() + except ValueError, e: + raise PhysicalConnectionError(_DROP_CODE_INVALID_MUX_CONTROL_BLOCK) + control_block.send_quota = self._read_number() + return control_block + + def _read_drop_channel(self, first_byte, control_block): + reserved = first_byte & 0x1f + if reserved != 0: + raise PhysicalConnectionError( + _DROP_CODE_INVALID_MUX_CONTROL_BLOCK, + 'Reserved bits must be unset') + + try: + control_block.channel_id = self.read_channel_id() + except ValueError, e: + raise PhysicalConnectionError(_DROP_CODE_INVALID_MUX_CONTROL_BLOCK) + reason = self._read_size_and_contents() + if len(reason) == 0: + control_block.drop_code = None + control_block.drop_message = '' + elif len(reason) >= 2: + control_block.drop_code = struct.unpack('!H', reason[:2])[0] + control_block.drop_message = reason[2:] + else: + raise PhysicalConnectionError( + _DROP_CODE_INVALID_MUX_CONTROL_BLOCK, + 'Received DropChannel that contains only 1-byte reason') + return control_block + + def _read_new_channel_slot(self, first_byte, control_block): + reserved = first_byte & 0x1e + if reserved != 0: + raise PhysicalConnectionError( + _DROP_CODE_INVALID_MUX_CONTROL_BLOCK, + 'Reserved bits must be unset') + control_block.fallback = first_byte & 1 + control_block.slots = self._read_number() + control_block.send_quota = self._read_number() + return control_block + + def read_control_blocks(self): + """Reads control block(s). + + Raises: + PhysicalConnectionError: when the payload contains invalid control + block(s). + StopIteration: when no control blocks left. 
+ """ + + while self._read_position < len(self._data): + first_byte = ord(self._data[self._read_position]) + self._read_position += 1 + opcode = (first_byte >> 5) & 0x7 + control_block = _ControlBlock(opcode=opcode) + if opcode == _MUX_OPCODE_ADD_CHANNEL_REQUEST: + yield self._read_add_channel_request(first_byte, control_block) + elif opcode == _MUX_OPCODE_ADD_CHANNEL_RESPONSE: + yield self._read_add_channel_response( + first_byte, control_block) + elif opcode == _MUX_OPCODE_FLOW_CONTROL: + yield self._read_flow_control(first_byte, control_block) + elif opcode == _MUX_OPCODE_DROP_CHANNEL: + yield self._read_drop_channel(first_byte, control_block) + elif opcode == _MUX_OPCODE_NEW_CHANNEL_SLOT: + yield self._read_new_channel_slot(first_byte, control_block) + else: + raise PhysicalConnectionError( + _DROP_CODE_UNKNOWN_MUX_OPCODE, + 'Invalid opcode %d' % opcode) + + assert self._read_position == len(self._data) + raise StopIteration + + def remaining_data(self): + """Returns remaining data.""" + + return self._data[self._read_position:] + + +class _LogicalRequest(object): + """Mimics mod_python request.""" + + def __init__(self, channel_id, command, path, protocol, headers, + connection): + """Constructs an instance. + + Args: + channel_id: the channel id of the logical channel. + command: HTTP request command. + path: HTTP request path. + headers: HTTP headers. + connection: _LogicalConnection instance. + """ + + self.channel_id = channel_id + self.method = command + self.uri = path + self.protocol = protocol + self.headers_in = headers + self.connection = connection + self.server_terminated = False + self.client_terminated = False + + def is_https(self): + """Mimics request.is_https(). Returns False because this method is + used only by old protocols (hixie and hybi00). + """ + + return False + + +class _LogicalConnection(object): + """Mimics mod_python mp_conn.""" + + # For details, see the comment of set_read_state(). + STATE_ACTIVE = 1 + STATE_GRACEFULLY_CLOSED = 2 + STATE_TERMINATED = 3 + + def __init__(self, mux_handler, channel_id): + """Constructs an instance. + + Args: + mux_handler: _MuxHandler instance. + channel_id: channel id of this connection. + """ + + self._mux_handler = mux_handler + self._channel_id = channel_id + self._incoming_data = '' + self._write_condition = threading.Condition() + self._waiting_write_completion = False + self._read_condition = threading.Condition() + self._read_state = self.STATE_ACTIVE + + def get_local_addr(self): + """Getter to mimic mp_conn.local_addr.""" + + return self._mux_handler.physical_connection.get_local_addr() + local_addr = property(get_local_addr) + + def get_remote_addr(self): + """Getter to mimic mp_conn.remote_addr.""" + + return self._mux_handler.physical_connection.get_remote_addr() + remote_addr = property(get_remote_addr) + + def get_memorized_lines(self): + """Gets memorized lines. Not supported.""" + + raise MuxUnexpectedException('_LogicalConnection does not support ' + 'get_memorized_lines') + + def write(self, data): + """Writes data. mux_handler sends data asynchronously. The caller will + be suspended until write done. + + Args: + data: data to be written. + + Raises: + MuxUnexpectedException: when called before finishing the previous + write. 
+ """ + + try: + self._write_condition.acquire() + if self._waiting_write_completion: + raise MuxUnexpectedException( + 'Logical connection %d is already waiting the completion ' + 'of write' % self._channel_id) + + self._waiting_write_completion = True + self._mux_handler.send_data(self._channel_id, data) + self._write_condition.wait() + finally: + self._write_condition.release() + + def write_control_data(self, data): + """Writes data via the control channel. Don't wait finishing write + because this method can be called by mux dispatcher. + + Args: + data: data to be written. + """ + + self._mux_handler.send_control_data(data) + + def notify_write_done(self): + """Called when sending data is completed.""" + + try: + self._write_condition.acquire() + if not self._waiting_write_completion: + raise MuxUnexpectedException( + 'Invalid call of notify_write_done for logical connection' + ' %d' % self._channel_id) + self._waiting_write_completion = False + self._write_condition.notify() + finally: + self._write_condition.release() + + def append_frame_data(self, frame_data): + """Appends incoming frame data. Called when mux_handler dispatches + frame data to the corresponding application. + + Args: + frame_data: incoming frame data. + """ + + self._read_condition.acquire() + self._incoming_data += frame_data + self._read_condition.notify() + self._read_condition.release() + + def read(self, length): + """Reads data. Blocks until enough data has arrived via physical + connection. + + Args: + length: length of data to be read. + Raises: + LogicalConnectionClosedException: when closing handshake for this + logical channel has been received. + ConnectionTerminatedException: when the physical connection has + closed, or an error is caused on the reader thread. + """ + + self._read_condition.acquire() + while (self._read_state == self.STATE_ACTIVE and + len(self._incoming_data) < length): + self._read_condition.wait() + + try: + if self._read_state == self.STATE_GRACEFULLY_CLOSED: + raise LogicalConnectionClosedException( + 'Logical channel %d has closed.' % self._channel_id) + elif self._read_state == self.STATE_TERMINATED: + raise ConnectionTerminatedException( + 'Receiving %d byte failed. Logical channel (%d) closed' % + (length, self._channel_id)) + + value = self._incoming_data[:length] + self._incoming_data = self._incoming_data[length:] + finally: + self._read_condition.release() + + return value + + def set_read_state(self, new_state): + """Sets the state of this connection. Called when an event for this + connection has occurred. + + Args: + new_state: state to be set. new_state must be one of followings: + - STATE_GRACEFULLY_CLOSED: when closing handshake for this + connection has been received. + - STATE_TERMINATED: when the physical connection has closed or + DropChannel of this connection has received. + """ + + self._read_condition.acquire() + self._read_state = new_state + self._read_condition.notify() + self._read_condition.release() + + +class _LogicalStream(Stream): + """Mimics the Stream class. This class interprets multiplexed WebSocket + frames. + """ + + def __init__(self, request, send_quota, receive_quota): + """Constructs an instance. + + Args: + request: _LogicalRequest instance. + send_quota: Initial send quota. + receive_quota: Initial receive quota. + """ + + # TODO(bashi): Support frame filters. + stream_options = StreamOptions() + # Physical stream is responsible for masking. 
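_LogicalConnection.write() above blocks on a condition variable until the physical writer thread calls notify_write_done(); the same rendezvous in isolation (a minimal sketch, Python 3):

    import threading

    cond = threading.Condition()
    write_done = [False]

    def physical_writer():
        with cond:
            write_done[0] = True  # pretend the frame hit the socket
            cond.notify()         # wake the blocked logical writer

    with cond:
        threading.Thread(target=physical_writer).start()
        while not write_done[0]:  # guard against spurious wakeups
            cond.wait()           # releases cond while waiting
    print('write completed')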
+        stream_options.unmask_receive = False
+        # Control frames can be fragmented on the logical channel.
+        stream_options.allow_fragmented_control_frame = True
+        Stream.__init__(self, request, stream_options)
+        self._send_quota = send_quota
+        self._send_quota_condition = threading.Condition()
+        self._receive_quota = receive_quota
+        self._write_inner_frame_semaphore = threading.Semaphore()
+
+    def _create_inner_frame(self, opcode, payload, end=True):
+        # TODO(bashi): Support extensions that use reserved bits.
+        first_byte = (end << 7) | opcode
+        return (_encode_channel_id(self._request.channel_id) +
+                chr(first_byte) + payload)
+
+    def _write_inner_frame(self, opcode, payload, end=True):
+        payload_length = len(payload)
+        write_position = 0
+
+        try:
+            # An inner frame will be fragmented if there is not enough send
+            # quota. This semaphore ensures that fragmented inner frames are
+            # sent in order on the logical channel.
+            # Note that frames that come from other logical channels or
+            # multiplexing control blocks can be inserted between fragmented
+            # inner frames on the physical channel.
+            self._write_inner_frame_semaphore.acquire()
+            while write_position < payload_length:
+                try:
+                    self._send_quota_condition.acquire()
+                    while self._send_quota == 0:
+                        self._logger.debug(
+                            'No quota. Waiting for FlowControl message for '
+                            '%d.' % self._request.channel_id)
+                        self._send_quota_condition.wait()
+
+                    remaining = payload_length - write_position
+                    write_length = min(self._send_quota, remaining)
+                    inner_frame_end = (
+                        end and
+                        (write_position + write_length == payload_length))
+
+                    inner_frame = self._create_inner_frame(
+                        opcode,
+                        payload[write_position:write_position+write_length],
+                        inner_frame_end)
+                    frame_data = self._writer.build(
+                        inner_frame, end=True, binary=True)
+                    self._send_quota -= write_length
+                    self._logger.debug('Consumed quota=%d, remaining=%d' %
+                                       (write_length, self._send_quota))
+                finally:
+                    self._send_quota_condition.release()
+
+                # Writing data will block the worker, so we need to release
+                # _send_quota_condition before writing.
+                self._logger.debug('Sending inner frame: %r' % frame_data)
+                self._request.connection.write(frame_data)
+                write_position += write_length
+
+                opcode = common.OPCODE_CONTINUATION
+
+        except ValueError, e:
+            raise BadOperationException(e)
+        finally:
+            self._write_inner_frame_semaphore.release()
+
+    def replenish_send_quota(self, send_quota):
+        """Replenishes the send quota."""
+
+        self._send_quota_condition.acquire()
+        self._send_quota += send_quota
+        self._logger.debug('Replenished send quota for channel id %d: %d' %
+                           (self._request.channel_id, self._send_quota))
+        self._send_quota_condition.notify()
+        self._send_quota_condition.release()
+
+    def consume_receive_quota(self, amount):
+        """Consumes receive quota. Returns False on failure."""
+
+        if self._receive_quota < amount:
+            self._logger.debug('Quota violation on channel id %d: %d < %d' %
+                               (self._request.channel_id,
+                                self._receive_quota, amount))
+            return False
+        self._receive_quota -= amount
+        return True
+
+    def send_message(self, message, end=True, binary=False):
+        """Overrides Stream.send_message."""
+
+        if self._request.server_terminated:
+            raise BadOperationException(
+                'Requested send_message after sending out a closing '
+                'handshake')
+
+        if binary and isinstance(message, unicode):
+            raise BadOperationException(
+                'Message for binary frame must be an instance of str')
+
+        if binary:
+            opcode = common.OPCODE_BINARY
+        else:
+            opcode = common.OPCODE_TEXT
+            message = message.encode('utf-8')
+
+        self._write_inner_frame(opcode, message, end)
+
+    def _receive_frame(self):
+        """Overrides Stream._receive_frame.
+
+        In addition to calling Stream._receive_frame, this method adds the
+        amount of payload to the receive quota and sends FlowControl to the
+        client. We need to do it here because Stream.receive_message()
+        handles control frames internally.
+        """
+
+        opcode, payload, fin, rsv1, rsv2, rsv3 = Stream._receive_frame(self)
+        amount = len(payload)
+        self._receive_quota += amount
+        frame_data = _create_flow_control(self._request.channel_id,
+                                          amount)
+        self._logger.debug('Sending flow control for %d, replenished=%d' %
+                           (self._request.channel_id, amount))
+        self._request.connection.write_control_data(frame_data)
+        return opcode, payload, fin, rsv1, rsv2, rsv3
+
+    def receive_message(self):
+        """Overrides Stream.receive_message."""
+
+        # Just call Stream.receive_message(), but catch
+        # LogicalConnectionClosedException, which is raised when the logical
+        # connection has closed gracefully.
+        try:
+            return Stream.receive_message(self)
+        except LogicalConnectionClosedException, e:
+            self._logger.debug('%s', e)
+            return None
+
+    def _send_closing_handshake(self, code, reason):
+        """Overrides Stream._send_closing_handshake."""
+
+        body = create_closing_handshake_body(code, reason)
+        self._logger.debug('Sending closing handshake for %d: (%r, %r)' %
+                           (self._request.channel_id, code, reason))
+        self._write_inner_frame(common.OPCODE_CLOSE, body, end=True)
+
+        self._request.server_terminated = True
+
+    def send_ping(self, body=''):
+        """Overrides Stream.send_ping."""
+
+        self._logger.debug('Sending ping on logical channel %d: %r' %
+                           (self._request.channel_id, body))
+        self._write_inner_frame(common.OPCODE_PING, body, end=True)
+
+        self._ping_queue.append(body)
+
+    def _send_pong(self, body):
+        """Overrides Stream._send_pong."""
+
+        self._logger.debug('Sending pong on logical channel %d: %r' %
+                           (self._request.channel_id, body))
+        self._write_inner_frame(common.OPCODE_PONG, body, end=True)
+
+    def close_connection(self, code=common.STATUS_NORMAL_CLOSURE, reason=''):
+        """Overrides Stream.close_connection."""
+
+        # TODO(bashi): Implement
+        self._logger.debug('Closing logical connection %d' %
+                           self._request.channel_id)
+        self._request.server_terminated = True
+
+    def _drain_received_data(self):
+        """Overrides Stream._drain_received_data. Nothing needs to be done
+        for a logical channel.
+        """
+
+        pass
+
+
+class _OutgoingData(object):
+    """A structure that holds data to be sent via the physical connection
+    and the origin of the data.
+    """
+
+    def __init__(self, channel_id, data):
+        self.channel_id = channel_id
+        self.data = data
+
+
+class _PhysicalConnectionWriter(threading.Thread):
+    """A thread that is responsible for writing data to the physical
+    connection.
+
+    TODO(bashi): Make sure there is no thread-safety problem when the reader
+    thread reads data from the same socket at the same time.
+    """
+
+    def __init__(self, mux_handler):
+        """Constructs an instance.
+
+        Args:
+            mux_handler: _MuxHandler instance.
+        """
+
+        threading.Thread.__init__(self)
+        self._logger = util.get_class_logger(self)
+        self._mux_handler = mux_handler
+        self.setDaemon(True)
+        self._stop_requested = False
+        self._deque = collections.deque()
+        self._deque_condition = threading.Condition()
+
+    def put_outgoing_data(self, data):
+        """Puts outgoing data.
+
+        Args:
+            data: _OutgoingData instance.
+
+        Raises:
+            BadOperationException: when the thread has been requested to
+                terminate.
+        """
+
+        try:
+            self._deque_condition.acquire()
+            if self._stop_requested:
+                raise BadOperationException('Cannot write data anymore')
+
+            self._deque.append(data)
+            self._deque_condition.notify()
+        finally:
+            self._deque_condition.release()
+
+    def _write_data(self, outgoing_data):
+        try:
+            self._mux_handler.physical_connection.write(outgoing_data.data)
+        except Exception, e:
+            util.prepend_message_to_exception(
+                'Failed to send message to %r: ' %
+                (self._mux_handler.physical_connection.remote_addr,), e)
+            raise
+
+        # TODO(bashi): It would be better to block the thread that sends
+        # control data as well.
+        if outgoing_data.channel_id != _CONTROL_CHANNEL_ID:
+            self._mux_handler.notify_write_done(outgoing_data.channel_id)
+
+    def run(self):
+        self._deque_condition.acquire()
+        while not self._stop_requested:
+            if len(self._deque) == 0:
+                self._deque_condition.wait()
+                continue
+
+            outgoing_data = self._deque.popleft()
+            self._deque_condition.release()
+            self._write_data(outgoing_data)
+            self._deque_condition.acquire()
+
+        # Flush the deque.
+        try:
+            while len(self._deque) > 0:
+                outgoing_data = self._deque.popleft()
+                self._write_data(outgoing_data)
+        finally:
+            self._deque_condition.release()
+
+    def stop(self):
+        """Stops the writer thread."""
+
+        self._deque_condition.acquire()
+        self._stop_requested = True
+        self._deque_condition.notify()
+        self._deque_condition.release()
+
+
+class _PhysicalConnectionReader(threading.Thread):
+    """A thread that is responsible for reading data from the physical
+    connection.
+    """
+
+    def __init__(self, mux_handler):
+        """Constructs an instance.
+
+        Args:
+            mux_handler: _MuxHandler instance.
+        """
+
+        threading.Thread.__init__(self)
+        self._logger = util.get_class_logger(self)
+        self._mux_handler = mux_handler
+        self.setDaemon(True)
+
+    def run(self):
+        while True:
+            try:
+                physical_stream = self._mux_handler.physical_stream
+                message = physical_stream.receive_message()
+                if message is None:
+                    break
+                # The code below runs only when a data message is received.
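+                # Logical frames arrive encapsulated in binary physical
+                # messages: a variable-length channel id followed by the
+                # inner frame (a first byte carrying FIN and opcode, then
+                # the payload); see _LogicalStream._create_inner_frame. A
+                # text message at this layer is therefore a protocol error.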
+                opcode = physical_stream.get_last_received_opcode()
+                if opcode != common.OPCODE_BINARY:
+                    self._mux_handler.fail_physical_connection(
+                        _DROP_CODE_INVALID_ENCAPSULATING_MESSAGE,
+                        'Received a text message on physical connection')
+                    break
+
+            except ConnectionTerminatedException, e:
+                self._logger.debug('%s', e)
+                break
+
+            try:
+                self._mux_handler.dispatch_message(message)
+            except PhysicalConnectionError, e:
+                self._mux_handler.fail_physical_connection(
+                    e.drop_code, e.message)
+                break
+            except LogicalChannelError, e:
+                self._mux_handler.fail_logical_channel(
+                    e.channel_id, e.drop_code, e.message)
+            except Exception, e:
+                self._logger.debug(traceback.format_exc())
+                break
+
+        self._mux_handler.notify_reader_done()
+
+
+class _Worker(threading.Thread):
+    """A thread that is responsible for running the corresponding
+    application handler.
+    """
+
+    def __init__(self, mux_handler, request):
+        """Constructs an instance.
+
+        Args:
+            mux_handler: _MuxHandler instance.
+            request: _LogicalRequest instance.
+        """
+
+        threading.Thread.__init__(self)
+        self._logger = util.get_class_logger(self)
+        self._mux_handler = mux_handler
+        self._request = request
+        self.setDaemon(True)
+
+    def run(self):
+        self._logger.debug('Logical channel worker started. (id=%d)' %
+                           self._request.channel_id)
+        try:
+            # Non-critical exceptions will be handled by the dispatcher.
+            self._mux_handler.dispatcher.transfer_data(self._request)
+        finally:
+            self._mux_handler.notify_worker_done(self._request.channel_id)
+
+
+class _MuxHandshaker(hybi.Handshaker):
+    """Opening handshake processor for multiplexing."""
+
+    _DUMMY_WEBSOCKET_KEY = 'dGhlIHNhbXBsZSBub25jZQ=='
+
+    def __init__(self, request, dispatcher, send_quota, receive_quota):
+        """Constructs an instance.
+
+        Args:
+            request: _LogicalRequest instance.
+            dispatcher: Dispatcher instance (dispatch.Dispatcher).
+            send_quota: Initial send quota.
+            receive_quota: Initial receive quota.
+        """
+
+        hybi.Handshaker.__init__(self, request, dispatcher)
+        self._send_quota = send_quota
+        self._receive_quota = receive_quota
+
+        # Append headers which should not be included in the handshake
+        # field of AddChannelRequest.
+        # TODO(bashi): Decide whether we should raise an exception when
+        # these headers are already included.
+        request.headers_in[common.UPGRADE_HEADER] = (
+            common.WEBSOCKET_UPGRADE_TYPE)
+        request.headers_in[common.CONNECTION_HEADER] = (
+            common.UPGRADE_CONNECTION_TYPE)
+        request.headers_in[common.SEC_WEBSOCKET_VERSION_HEADER] = (
+            str(common.VERSION_HYBI_LATEST))
+        request.headers_in[common.SEC_WEBSOCKET_KEY_HEADER] = (
+            self._DUMMY_WEBSOCKET_KEY)
+
+    def _create_stream(self, stream_options):
+        """Overrides hybi.Handshaker._create_stream."""
+
+        self._logger.debug('Creating logical stream for %d' %
+                           self._request.channel_id)
+        return _LogicalStream(self._request, self._send_quota,
+                              self._receive_quota)
+
+    def _create_handshake_response(self, accept):
+        """Overrides hybi._create_handshake_response."""
+
+        response = []
+
+        response.append('HTTP/1.1 101 Switching Protocols\r\n')
+
+        # Upgrade, Connection and Sec-WebSocket-Accept should be excluded.
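+        # For illustration, a channel whose handshake negotiated the
+        # subprotocol 'chat' would produce an encoded handshake like the
+        # following (a sketch; actual headers depend on the request):
+        #
+        #     HTTP/1.1 101 Switching Protocols\r\n
+        #     Sec-WebSocket-Protocol: chat\r\n
+        #     \r\n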
+        if self._request.ws_protocol is not None:
+            response.append('%s: %s\r\n' % (
+                common.SEC_WEBSOCKET_PROTOCOL_HEADER,
+                self._request.ws_protocol))
+        if (self._request.ws_extensions is not None and
+            len(self._request.ws_extensions) != 0):
+            response.append('%s: %s\r\n' % (
+                common.SEC_WEBSOCKET_EXTENSIONS_HEADER,
+                common.format_extensions(self._request.ws_extensions)))
+        response.append('\r\n')
+
+        return ''.join(response)
+
+    def _send_handshake(self, accept):
+        """Overrides hybi.Handshaker._send_handshake."""
+
+        # Don't send a handshake response for the default channel.
+        if self._request.channel_id == _DEFAULT_CHANNEL_ID:
+            return
+
+        handshake_response = self._create_handshake_response(accept)
+        frame_data = _create_add_channel_response(
+            self._request.channel_id,
+            handshake_response)
+        self._logger.debug('Sending handshake response for %d: %r' %
+                           (self._request.channel_id, frame_data))
+        self._request.connection.write_control_data(frame_data)
+
+
+class _LogicalChannelData(object):
+    """A structure that holds information about a logical channel."""
+
+    def __init__(self, request, worker):
+        self.request = request
+        self.worker = worker
+        self.drop_code = _DROP_CODE_NORMAL_CLOSURE
+        self.drop_message = ''
+
+
+class _HandshakeDeltaBase(object):
+    """A class that holds information for delta-encoded handshakes."""
+
+    def __init__(self, headers):
+        self._headers = headers
+
+    def create_headers(self, delta=None):
+        """Creates request headers for an AddChannelRequest that has a
+        delta-encoded handshake.
+
+        Args:
+            delta: headers to be overridden.
+        """
+
+        headers = copy.copy(self._headers)
+        if delta:
+            for key, value in delta.items():
+                # The spec requires that a header with an empty value is
+                # removed from the delta base.
+                if len(value) == 0 and key in headers:
+                    del headers[key]
+                else:
+                    headers[key] = value
+        # TODO(bashi): Support extensions
+        headers['Sec-WebSocket-Extensions'] = ''
+        return headers
+
+
+class _MuxHandler(object):
+    """Multiplexing handler. When a handler starts, it launches three
+    threads: the reader thread, the writer thread, and a worker thread.
+
+    The reader thread reads data from the physical stream, i.e., the
+    ws_stream object of the underlying WebSocket connection. The reader
+    thread interprets multiplexed frames and dispatches them to logical
+    channels. Methods of this class are mostly called by the reader thread.
+
+    The writer thread sends multiplexed frames which are created by
+    logical channels via the physical connection.
+
+    The worker thread launched at the starting point handles the
+    "Implicitly Opened Connection". If the multiplexing handler receives
+    an AddChannelRequest and accepts it, the handler will launch a new
+    worker thread and dispatch the request to it.
+    """
+
+    def __init__(self, request, dispatcher):
+        """Constructs an instance.
+
+        Args:
+            request: mod_python request of the physical connection.
+            dispatcher: Dispatcher instance (dispatch.Dispatcher).
+        """
+
+        self.original_request = request
+        self.dispatcher = dispatcher
+        self.physical_connection = request.connection
+        self.physical_stream = request.ws_stream
+        self._logger = util.get_class_logger(self)
+        self._logical_channels = {}
+        self._logical_channels_condition = threading.Condition()
+        # Holds the client's initial quota.
+        self._channel_slots = collections.deque()
+        self._handshake_base = None
+        self._worker_done_notify_received = False
+        self._reader = None
+        self._writer = None
+
+    def start(self):
+        """Starts the handler.
+
+        Raises:
+            MuxUnexpectedException: when the handler has already started,
+                or when the opening handshake of the default channel fails.
+        """
+
+        if self._reader or self._writer:
+            raise MuxUnexpectedException('MuxHandler already started')
+
+        self._reader = _PhysicalConnectionReader(self)
+        self._writer = _PhysicalConnectionWriter(self)
+        self._reader.start()
+        self._writer.start()
+
+        # Create the "Implicitly Opened Connection".
+        logical_connection = _LogicalConnection(self, _DEFAULT_CHANNEL_ID)
+        self._handshake_base = _HandshakeDeltaBase(
+            self.original_request.headers_in)
+        logical_request = _LogicalRequest(
+            _DEFAULT_CHANNEL_ID,
+            self.original_request.method,
+            self.original_request.uri,
+            self.original_request.protocol,
+            self._handshake_base.create_headers(),
+            logical_connection)
+        # The client's send quota for the implicitly opened connection is
+        # zero, but we will send FlowControl later, so set the initial
+        # quota to _INITIAL_QUOTA_FOR_CLIENT.
+        self._channel_slots.append(_INITIAL_QUOTA_FOR_CLIENT)
+        if not self._do_handshake_for_logical_request(
+            logical_request, send_quota=self.original_request.mux_quota):
+            raise MuxUnexpectedException(
+                'Failed handshake on the default channel id')
+        self._add_logical_channel(logical_request)
+
+        # Send FlowControl for the implicitly opened connection.
+        frame_data = _create_flow_control(_DEFAULT_CHANNEL_ID,
+                                          _INITIAL_QUOTA_FOR_CLIENT)
+        logical_request.connection.write_control_data(frame_data)
+
+    def add_channel_slots(self, slots, send_quota):
+        """Adds channel slots.
+
+        Args:
+            slots: number of slots to be added.
+            send_quota: initial send quota for the slots.
+        """
+
+        self._channel_slots.extend([send_quota] * slots)
+        # Send NewChannelSlot to the client.
+        frame_data = _create_new_channel_slot(slots, send_quota)
+        self.send_control_data(frame_data)
+
+    def wait_until_done(self, timeout=None):
+        """Waits until all workers are done. Returns False when a timeout
+        occurs, True on success.
+
+        Args:
+            timeout: timeout in seconds.
+        """
+
+        self._logical_channels_condition.acquire()
+        try:
+            while len(self._logical_channels) > 0:
+                self._logger.debug('Waiting for workers (%d)...' %
+                                   len(self._logical_channels))
+                self._worker_done_notify_received = False
+                self._logical_channels_condition.wait(timeout)
+                if not self._worker_done_notify_received:
+                    self._logger.debug('Waiting for worker(s) timed out')
+                    return False
+
+        finally:
+            self._logical_channels_condition.release()
+
+        # Flush pending outgoing data.
+        self._writer.stop()
+        self._writer.join()
+
+        return True
+
+    def notify_write_done(self, channel_id):
+        """Called by the writer thread when a write operation has finished.
+
+        Args:
+            channel_id: objective channel id.
+        """
+
+        try:
+            self._logical_channels_condition.acquire()
+            if channel_id in self._logical_channels:
+                channel_data = self._logical_channels[channel_id]
+                channel_data.request.connection.notify_write_done()
+            else:
+                self._logger.debug('Seems that the logical channel for %d '
+                                   'has gone' % channel_id)
+        finally:
+            self._logical_channels_condition.release()
+
+    def send_control_data(self, data):
+        """Sends data via the control channel.
+
+        Args:
+            data: data to be sent.
+        """
+
+        self._writer.put_outgoing_data(_OutgoingData(
+            channel_id=_CONTROL_CHANNEL_ID, data=data))
+
+    def send_data(self, channel_id, data):
+        """Sends data via the given logical channel. This method is called
+        by worker threads.
+
+        Args:
+            channel_id: objective channel id.
+            data: data to be sent.
+        """
+
+        self._writer.put_outgoing_data(_OutgoingData(
+            channel_id=channel_id, data=data))
+
+    def _send_drop_channel(self, channel_id, code=None, message=''):
+        frame_data = _create_drop_channel(channel_id, code, message)
+        self._logger.debug(
+            'Sending drop channel for channel id %d' % channel_id)
+        self.send_control_data(frame_data)
+
+    def _send_error_add_channel_response(self, channel_id, status=None):
+        if status is None:
+            status = common.HTTP_STATUS_BAD_REQUEST
+
+        if status in _HTTP_BAD_RESPONSE_MESSAGES:
+            message = _HTTP_BAD_RESPONSE_MESSAGES[status]
+        else:
+            self._logger.debug('Response message for %d is not found' %
+                               status)
+            message = '???'
+
+        response = 'HTTP/1.1 %d %s\r\n\r\n' % (status, message)
+        frame_data = _create_add_channel_response(channel_id,
+                                                  encoded_handshake=response,
+                                                  encoding=0, rejected=True)
+        self.send_control_data(frame_data)
+
+    def _create_logical_request(self, block):
+        if block.channel_id == _CONTROL_CHANNEL_ID:
+            # TODO(bashi): Raise PhysicalConnectionError with code 2006
+            # instead of MuxUnexpectedException.
+            raise MuxUnexpectedException(
+                'Received the control channel id (0) as objective channel '
+                'id for AddChannel')
+
+        if block.encoding > _HANDSHAKE_ENCODING_DELTA:
+            raise PhysicalConnectionError(
+                _DROP_CODE_UNKNOWN_REQUEST_ENCODING)
+
+        method, path, version, headers = _parse_request_text(
+            block.encoded_handshake)
+        if block.encoding == _HANDSHAKE_ENCODING_DELTA:
+            headers = self._handshake_base.create_headers(headers)
+
+        connection = _LogicalConnection(self, block.channel_id)
+        request = _LogicalRequest(block.channel_id, method, path, version,
+                                  headers, connection)
+        return request
+
+    def _do_handshake_for_logical_request(self, request, send_quota=0):
+        try:
+            receive_quota = self._channel_slots.popleft()
+        except IndexError:
+            raise LogicalChannelError(
+                request.channel_id, _DROP_CODE_NEW_CHANNEL_SLOT_VIOLATION)
+
+        handshaker = _MuxHandshaker(request, self.dispatcher,
+                                    send_quota, receive_quota)
+        try:
+            handshaker.do_handshake()
+        except handshake.VersionException, e:
+            self._logger.info('%s', e)
+            self._send_error_add_channel_response(
+                request.channel_id, status=common.HTTP_STATUS_BAD_REQUEST)
+            return False
+        except handshake.HandshakeException, e:
+            # TODO(bashi): Should we _Fail the Logical Channel_ with 3001
+            # instead?
+            self._logger.info('%s', e)
+            self._send_error_add_channel_response(request.channel_id,
+                                                  status=e.status)
+            return False
+        except handshake.AbortedByUserException, e:
+            self._logger.info('%s', e)
+            self._send_error_add_channel_response(request.channel_id)
+            return False
+
+        return True
+
+    def _add_logical_channel(self, logical_request):
+        try:
+            self._logical_channels_condition.acquire()
+            if logical_request.channel_id in self._logical_channels:
+                self._logger.debug('Channel id %d already exists' %
+                                   logical_request.channel_id)
+                raise PhysicalConnectionError(
+                    _DROP_CODE_CHANNEL_ALREADY_EXISTS,
+                    'Channel id %d already exists' %
+                    logical_request.channel_id)
+            worker = _Worker(self, logical_request)
+            channel_data = _LogicalChannelData(logical_request, worker)
+            self._logical_channels[logical_request.channel_id] = channel_data
+            worker.start()
+        finally:
+            self._logical_channels_condition.release()
+
+    def _process_add_channel_request(self, block):
+        try:
+            logical_request = self._create_logical_request(block)
+        except ValueError, e:
+            self._logger.debug('Failed to create logical request: %r' % e)
+            self._send_error_add_channel_response(
+                block.channel_id, status=common.HTTP_STATUS_BAD_REQUEST)
+            return
+        if self._do_handshake_for_logical_request(logical_request):
+            if block.encoding == _HANDSHAKE_ENCODING_IDENTITY:
+                # Update the handshake base.
+                # TODO(bashi): Make sure this is the right place to update
+                # the handshake base.
+                self._handshake_base = _HandshakeDeltaBase(
+                    logical_request.headers_in)
+            self._add_logical_channel(logical_request)
+        else:
+            self._send_error_add_channel_response(
+                block.channel_id, status=common.HTTP_STATUS_BAD_REQUEST)
+
+    def _process_flow_control(self, block):
+        try:
+            self._logical_channels_condition.acquire()
+            if block.channel_id not in self._logical_channels:
+                return
+            channel_data = self._logical_channels[block.channel_id]
+            channel_data.request.ws_stream.replenish_send_quota(
+                block.send_quota)
+        finally:
+            self._logical_channels_condition.release()
+
+    def _process_drop_channel(self, block):
+        self._logger.debug(
+            'DropChannel received for %d: code=%r, reason=%r' %
+            (block.channel_id, block.drop_code, block.drop_message))
+        try:
+            self._logical_channels_condition.acquire()
+            if block.channel_id not in self._logical_channels:
+                return
+            channel_data = self._logical_channels[block.channel_id]
+            channel_data.drop_code = _DROP_CODE_ACKNOWLEDGED
+            # Close the logical channel.
+            channel_data.request.connection.set_read_state(
+                _LogicalConnection.STATE_TERMINATED)
+        finally:
+            self._logical_channels_condition.release()
+
+    def _process_control_blocks(self, parser):
+        for control_block in parser.read_control_blocks():
+            opcode = control_block.opcode
+            self._logger.debug('Control block received, opcode: %d' %
+                               opcode)
+            if opcode == _MUX_OPCODE_ADD_CHANNEL_REQUEST:
+                self._process_add_channel_request(control_block)
+            elif opcode == _MUX_OPCODE_ADD_CHANNEL_RESPONSE:
+                raise PhysicalConnectionError(
+                    _DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
+                    'Received AddChannelResponse')
+            elif opcode == _MUX_OPCODE_FLOW_CONTROL:
+                self._process_flow_control(control_block)
+            elif opcode == _MUX_OPCODE_DROP_CHANNEL:
+                self._process_drop_channel(control_block)
+            elif opcode == _MUX_OPCODE_NEW_CHANNEL_SLOT:
+                raise PhysicalConnectionError(
+                    _DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
+                    'Received NewChannelSlot')
+            else:
+                raise MuxUnexpectedException(
+                    'Unexpected opcode %r' % opcode)
+
+    def _process_logical_frame(self, channel_id, parser):
+        self._logger.debug('Received a frame. channel id=%d' % channel_id)
+        try:
+            self._logical_channels_condition.acquire()
+            if channel_id not in self._logical_channels:
+                # We must ignore the message for an inactive channel.
+                return
+            channel_data = self._logical_channels[channel_id]
+            fin, rsv1, rsv2, rsv3, opcode, payload = (
+                parser.read_inner_frame())
+            if not channel_data.request.ws_stream.consume_receive_quota(
+                len(payload)):
+                # The client violated the quota. Close the logical channel.
+                raise LogicalChannelError(
+                    channel_id, _DROP_CODE_SEND_QUOTA_VIOLATION)
+            header = create_header(opcode, len(payload), fin, rsv1, rsv2,
+                                   rsv3, mask=False)
+            frame_data = header + payload
+            channel_data.request.connection.append_frame_data(frame_data)
+        finally:
+            self._logical_channels_condition.release()
+
+    def dispatch_message(self, message):
+        """Dispatches a message. The reader thread calls this method.
+
+        Args:
+            message: a message that contains an encapsulated frame.
+        Raises:
+            PhysicalConnectionError: if the message contains physical
+                connection level errors.
+            LogicalChannelError: if the message contains logical channel
+                level errors.
+        """
+
+        parser = _MuxFramePayloadParser(message)
+        try:
+            channel_id = parser.read_channel_id()
+        except ValueError, e:
+            raise PhysicalConnectionError(_DROP_CODE_CHANNEL_ID_TRUNCATED)
+        if channel_id == _CONTROL_CHANNEL_ID:
+            self._process_control_blocks(parser)
+        else:
+            self._process_logical_frame(channel_id, parser)
+
+    def notify_worker_done(self, channel_id):
+        """Called when a worker has finished.
+
+        Args:
+            channel_id: channel id corresponding to the worker.
+        """
+
+        self._logger.debug('Worker for channel id %d terminated' %
+                           channel_id)
+        try:
+            self._logical_channels_condition.acquire()
+            if channel_id not in self._logical_channels:
+                raise MuxUnexpectedException(
+                    'Channel id %d not found' % channel_id)
+            channel_data = self._logical_channels.pop(channel_id)
+        finally:
+            self._worker_done_notify_received = True
+            self._logical_channels_condition.notify()
+            self._logical_channels_condition.release()
+
+        if not channel_data.request.server_terminated:
+            self._send_drop_channel(
+                channel_id, code=channel_data.drop_code,
+                message=channel_data.drop_message)
+
+    def notify_reader_done(self):
+        """This method is called by the reader thread when the reader has
+        finished.
+        """
+
+        # Terminate all logical connections.
+        self._logger.debug('Terminating all logical connections...')
+        self._logical_channels_condition.acquire()
+        for channel_data in self._logical_channels.values():
+            try:
+                channel_data.request.connection.set_read_state(
+                    _LogicalConnection.STATE_TERMINATED)
+            except Exception:
+                pass
+        self._logical_channels_condition.release()
+
+    def fail_physical_connection(self, code, message):
+        """Fail the physical connection.
+
+        Args:
+            code: drop reason code.
+            message: drop message.
+        """
+
+        self._logger.debug('Failing the physical connection...')
+        self._send_drop_channel(_CONTROL_CHANNEL_ID, code, message)
+        self.physical_stream.close_connection(
+            common.STATUS_INTERNAL_ENDPOINT_ERROR)
+
+    def fail_logical_channel(self, channel_id, code, message):
+        """Fail a logical channel.
+
+        Args:
+            channel_id: channel id.
+            code: drop reason code.
+            message: drop message.
+        """
+
+        self._logger.debug('Failing logical channel %d...' % channel_id)
+        try:
+            self._logical_channels_condition.acquire()
+            if channel_id in self._logical_channels:
+                channel_data = self._logical_channels[channel_id]
+                # Close the logical channel. notify_worker_done() will be
+                # called later and it will send DropChannel.
+                channel_data.drop_code = code
+                channel_data.drop_message = message
+                channel_data.request.connection.set_read_state(
+                    _LogicalConnection.STATE_TERMINATED)
+            else:
+                self._send_drop_channel(channel_id, code, message)
+        finally:
+            self._logical_channels_condition.release()
+
+
+def use_mux(request):
+    return hasattr(request, 'mux') and request.mux
+
+
+def start(request, dispatcher):
+    mux_handler = _MuxHandler(request, dispatcher)
+    mux_handler.start()
+
+    mux_handler.add_channel_slots(_INITIAL_NUMBER_OF_CHANNEL_SLOTS,
+                                  _INITIAL_QUOTA_FOR_CLIENT)
+
+    mux_handler.wait_until_done()
+
+
+# vi:sts=4 sw=4 et
diff --git a/pyload/lib/mod_pywebsocket/standalone.py b/pyload/lib/mod_pywebsocket/standalone.py
new file mode 100755
index 000000000..07a33d9c9
--- /dev/null
+++ b/pyload/lib/mod_pywebsocket/standalone.py
@@ -0,0 +1,998 @@
+#!/usr/bin/env python
+#
+# Copyright 2012, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+"""Standalone WebSocket server.
+
+Use this file to launch pywebsocket without Apache HTTP Server.
+
+
+BASIC USAGE
+
+Go to the src directory and run
+
+   $ python mod_pywebsocket/standalone.py [-p <ws_port>]
+                                          [-w <websock_handlers>]
+                                          [-d <document_root>]
+
+<ws_port> is the port number to use for the ws:// connection.
+
+<document_root> is the path to the root directory of HTML files.
+
+<websock_handlers> is the path to the root directory of WebSocket handlers.
+If not specified, <document_root> will be used. See __init__.py (or
+run $ pydoc mod_pywebsocket) for how to write WebSocket handlers.
+
+For more detail and other options, run
+
+   $ python mod_pywebsocket/standalone.py --help
+
+or see the _build_option_parser method below.
+
+For troubleshooting, adding "--log_level debug" might help you.
+
+
+TRY DEMO
+
+Go to the src directory and run
+
+   $ python standalone.py -d example
+
+to launch pywebsocket with the sample handler and html on port 80. Open
+http://localhost/console.html, click the connect button, type something into
+the text box next to the send button and click the send button.
+If everything is working, you'll see the message you typed echoed by the
+server.
+
+
+SUPPORTING TLS
+
+To support TLS, run standalone.py with -t, -k, and -c options.
+
+
+SUPPORTING CLIENT AUTHENTICATION
+
+To support client authentication with TLS, run standalone.py with -t, -k,
+-c, --tls-client-auth, and --tls-client-ca options.
+
+E.g., $ ./standalone.py -d ../example -p 10443 -t -c ../test/cert/cert.pem -k
+../test/cert/key.pem --tls-client-auth --tls-client-ca=../test/cert/cacert.pem
+
+
+CONFIGURATION FILE
+
+You can also write a configuration file and use it by specifying the path to
+the configuration file with the --config option. Please write a configuration
+file following the documentation of the Python ConfigParser library. The name
+of each entry must be the long version argument name. E.g. to set the log
+level to debug, add the following line:
+
+log_level=debug
+
+For options which don't take a value, please add some fake value. E.g. for
+the --tls option, add the following line:
+
+tls=True
+
+Note that tls will be enabled even if you write tls=False, because the value
+part is ignored.
+
+When both a command line argument and a configuration file entry are set for
+the same configuration item, the command line value will override the one in
+the configuration file.
+
+
+THREADING
+
+This server is derived from SocketServer.ThreadingMixIn. Hence a thread is
+used for each request.
+
+
+SECURITY WARNING
+
+This uses CGIHTTPServer and CGIHTTPServer is not secure.
+It may execute arbitrary Python code or external programs. It should not be
+used outside a firewall.
+"""
+
+import BaseHTTPServer
+import CGIHTTPServer
+import SimpleHTTPServer
+import SocketServer
+import ConfigParser
+import base64
+import httplib
+import logging
+import logging.handlers
+import optparse
+import os
+import re
+import select
+import socket
+import sys
+import threading
+import time
+
+_HAS_SSL = False
+_HAS_OPEN_SSL = False
+try:
+    import ssl
+    _HAS_SSL = True
+except ImportError:
+    try:
+        import OpenSSL.SSL
+        _HAS_OPEN_SSL = True
+    except ImportError:
+        pass
+
+from mod_pywebsocket import common
+from mod_pywebsocket import dispatch
+from mod_pywebsocket import handshake
+from mod_pywebsocket import http_header_util
+from mod_pywebsocket import memorizingfile
+from mod_pywebsocket import util
+
+
+_DEFAULT_LOG_MAX_BYTES = 1024 * 256
+_DEFAULT_LOG_BACKUP_COUNT = 5
+
+_DEFAULT_REQUEST_QUEUE_SIZE = 128
+
+# 1024 is practically large enough to contain WebSocket handshake lines.
+_MAX_MEMORIZED_LINES = 1024
+
+
+class _StandaloneConnection(object):
+    """Mimic mod_python mp_conn."""
+
+    def __init__(self, request_handler):
+        """Construct an instance.
+
+        Args:
+            request_handler: A WebSocketRequestHandler instance.
+        """
+
+        self._request_handler = request_handler
+
+    def get_local_addr(self):
+        """Getter to mimic mp_conn.local_addr."""
+
+        return (self._request_handler.server.server_name,
+                self._request_handler.server.server_port)
+    local_addr = property(get_local_addr)
+
+    def get_remote_addr(self):
+        """Getter to mimic mp_conn.remote_addr.
+
+        Setting the property in __init__ won't work because the request
+        handler is not yet initialized there."""
+
+        return self._request_handler.client_address
+    remote_addr = property(get_remote_addr)
+
+    def write(self, data):
+        """Mimic mp_conn.write()."""
+
+        return self._request_handler.wfile.write(data)
+
+    def read(self, length):
+        """Mimic mp_conn.read()."""
+
+        return self._request_handler.rfile.read(length)
+
+    def get_memorized_lines(self):
+        """Get memorized lines."""
+
+        return self._request_handler.rfile.get_memorized_lines()
+
+
+class _StandaloneRequest(object):
+    """Mimic mod_python request."""
+
+    def __init__(self, request_handler, use_tls):
+        """Construct an instance.
+
+        Args:
+            request_handler: A WebSocketRequestHandler instance.
+            use_tls: Whether TLS is in use (True for wss://).
+        """
+
+        self._logger = util.get_class_logger(self)
+
+        self._request_handler = request_handler
+        self.connection = _StandaloneConnection(request_handler)
+        self._use_tls = use_tls
+        self.headers_in = request_handler.headers
+
+    def get_uri(self):
+        """Getter to mimic request.uri."""
+
+        return self._request_handler.path
+    uri = property(get_uri)
+
+    def get_method(self):
+        """Getter to mimic request.method."""
+
+        return self._request_handler.command
+    method = property(get_method)
+
+    def get_protocol(self):
+        """Getter to mimic request.protocol."""
+
+        return self._request_handler.request_version
+    protocol = property(get_protocol)
+
+    def is_https(self):
+        """Mimic request.is_https()."""
+
+        return self._use_tls
+
+    def _drain_received_data(self):
+        """Don't use this method from WebSocket handlers. Drains unread
+        data in the receive buffer.
+        """
+
+        raw_socket = self._request_handler.connection
+        drained_data = util.drain_received_data(raw_socket)
+
+        if drained_data:
+            self._logger.debug(
+                'Drained data following close frame: %r', drained_data)
+
+
+class _StandaloneSSLConnection(object):
+    """A wrapper class for OpenSSL.SSL.Connection to provide the makefile
+    method, which is not supported by that class.
+    """
+
+    def __init__(self, connection):
+        self._connection = connection
+
+    def __getattribute__(self, name):
+        if name in ('_connection', 'makefile'):
+            return object.__getattribute__(self, name)
+        return self._connection.__getattribute__(name)
+
+    def __setattr__(self, name, value):
+        if name in ('_connection', 'makefile'):
+            return object.__setattr__(self, name, value)
+        return self._connection.__setattr__(name, value)
+
+    def makefile(self, mode='r', bufsize=-1):
+        return socket._fileobject(self._connection, mode, bufsize)
+
+
+def _alias_handlers(dispatcher, websock_handlers_map_file):
+    """Set aliases specified in websock_handlers_map_file in the dispatcher.
+
+    Args:
+        dispatcher: dispatch.Dispatcher instance
+        websock_handlers_map_file: alias map file
+    """
+
+    fp = open(websock_handlers_map_file)
+    try:
+        for line in fp:
+            if line[0] == '#' or line.isspace():
+                continue
+            m = re.match(r'(\S+)\s+(\S+)', line)
+            if not m:
+                logging.warning('Wrong format in map file: ' + line)
+                continue
+            try:
+                dispatcher.add_resource_path_alias(
+                    m.group(1), m.group(2))
+            except dispatch.DispatchException, e:
+                logging.error(str(e))
+    finally:
+        fp.close()
+
+
+class WebSocketServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer):
+    """HTTPServer specialized for WebSocket."""
+
+    # Overrides SocketServer.ThreadingMixIn.daemon_threads
+    daemon_threads = True
+    # Overrides BaseHTTPServer.HTTPServer.allow_reuse_address
+    allow_reuse_address = True
+
+    def __init__(self, options):
+        """Override SocketServer.TCPServer.__init__ to set an SSL-enabled
+        socket object to self.socket before server_bind and server_activate,
+        if necessary.
+        """
+
+        # Share a Dispatcher among request handlers to save time for
+        # instantiation. Dispatcher can be shared because it is thread-safe.
+        options.dispatcher = dispatch.Dispatcher(
+            options.websock_handlers,
+            options.scan_dir,
+            options.allow_handlers_outside_root_dir)
+        if options.websock_handlers_map_file:
+            _alias_handlers(options.dispatcher,
+                            options.websock_handlers_map_file)
+        warnings = options.dispatcher.source_warnings()
+        if warnings:
+            for warning in warnings:
+                logging.warning('mod_pywebsocket: %s' % warning)
+
+        self._logger = util.get_class_logger(self)
+
+        self.request_queue_size = options.request_queue_size
+        self.__ws_is_shut_down = threading.Event()
+        self.__ws_serving = False
+
+        SocketServer.BaseServer.__init__(
+            self, (options.server_host, options.port),
+            WebSocketRequestHandler)
+
+        # Expose the options object to allow handler objects to access it.
+        # We name it with the websocket_ prefix to avoid conflicts.
+        self.websocket_server_options = options
+
+        self._create_sockets()
+        self.server_bind()
+        self.server_activate()
+
+    def _create_sockets(self):
+        self.server_name, self.server_port = self.server_address
+        self._sockets = []
+        if not self.server_name:
+            # On platforms that don't support IPv6, the first bind fails.
+            # On platforms that support IPv6:
+            # - If it binds both IPv4 and IPv6 on a call with AF_INET6, the
+            #   first bind succeeds and the second fails (we'll see an
+            #   'Address already in use' error).
+            # - If it binds only IPv6 on a call with AF_INET6, both calls
+            #   are expected to succeed so that we listen on both protocols.
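+            # The fabricated tuples below mimic the (family, socktype,
+            # proto, canonname, sockaddr) shape of socket.getaddrinfo()
+            # results; only family and socktype are consumed when creating
+            # the socket, so the remaining fields are left empty.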
+            addrinfo_array = [
+                (socket.AF_INET6, socket.SOCK_STREAM, '', '', ''),
+                (socket.AF_INET, socket.SOCK_STREAM, '', '', '')]
+        else:
+            addrinfo_array = socket.getaddrinfo(self.server_name,
+                                                self.server_port,
+                                                socket.AF_UNSPEC,
+                                                socket.SOCK_STREAM,
+                                                socket.IPPROTO_TCP)
+        for addrinfo in addrinfo_array:
+            self._logger.info('Create socket on: %r', addrinfo)
+            family, socktype, proto, canonname, sockaddr = addrinfo
+            try:
+                socket_ = socket.socket(family, socktype)
+            except Exception, e:
+                self._logger.info('Skip by failure: %r', e)
+                continue
+            if self.websocket_server_options.use_tls:
+                if _HAS_SSL:
+                    if self.websocket_server_options.tls_client_auth:
+                        client_cert_ = ssl.CERT_REQUIRED
+                    else:
+                        client_cert_ = ssl.CERT_NONE
+                    socket_ = ssl.wrap_socket(socket_,
+                        keyfile=self.websocket_server_options.private_key,
+                        certfile=self.websocket_server_options.certificate,
+                        ssl_version=ssl.PROTOCOL_SSLv23,
+                        ca_certs=self.websocket_server_options.tls_client_ca,
+                        cert_reqs=client_cert_)
+                if _HAS_OPEN_SSL:
+                    ctx = OpenSSL.SSL.Context(OpenSSL.SSL.SSLv23_METHOD)
+                    ctx.use_privatekey_file(
+                        self.websocket_server_options.private_key)
+                    ctx.use_certificate_file(
+                        self.websocket_server_options.certificate)
+                    socket_ = OpenSSL.SSL.Connection(ctx, socket_)
+            self._sockets.append((socket_, addrinfo))
+
+    def server_bind(self):
+        """Override SocketServer.TCPServer.server_bind to enable binding
+        multiple sockets.
+        """
+
+        failed_sockets = []
+
+        for socketinfo in self._sockets:
+            socket_, addrinfo = socketinfo
+            self._logger.info('Bind on: %r', addrinfo)
+            if self.allow_reuse_address:
+                socket_.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+            try:
+                socket_.bind(self.server_address)
+            except Exception, e:
+                self._logger.info('Skip by failure: %r', e)
+                socket_.close()
+                failed_sockets.append(socketinfo)
+            if self.server_address[1] == 0:
+                # The operating system assigns the actual port number for
+                # port number 0. In this case, the second and later sockets
+                # should use the same port number. Also self.server_port is
+                # rewritten because it is exported, and will be used by
+                # external code.
+                self.server_address = (
+                    self.server_name, socket_.getsockname()[1])
+                self.server_port = self.server_address[1]
+                self._logger.info('Port %r is assigned', self.server_port)
+
+        for socketinfo in failed_sockets:
+            self._sockets.remove(socketinfo)
+
+    def server_activate(self):
+        """Override SocketServer.TCPServer.server_activate to enable
+        listening on multiple sockets.
+        """
+
+        failed_sockets = []
+
+        for socketinfo in self._sockets:
+            socket_, addrinfo = socketinfo
+            self._logger.info('Listen on: %r', addrinfo)
+            try:
+                socket_.listen(self.request_queue_size)
+            except Exception, e:
+                self._logger.info('Skip by failure: %r', e)
+                socket_.close()
+                failed_sockets.append(socketinfo)
+
+        for socketinfo in failed_sockets:
+            self._sockets.remove(socketinfo)
+
+        if len(self._sockets) == 0:
+            self._logger.critical(
+                'No sockets activated. Use info log level to see the '
+                'reason.')
+
+    def server_close(self):
+        """Override SocketServer.TCPServer.server_close to enable closing
+        multiple sockets.
+        """
+
+        for socketinfo in self._sockets:
+            socket_, addrinfo = socketinfo
+            self._logger.info('Close on: %r', addrinfo)
+            socket_.close()
+
+    def fileno(self):
+        """Override SocketServer.TCPServer.fileno."""
+
+        self._logger.critical('Not supported: fileno')
+        return self._sockets[0][0].fileno()
+
+    def handle_error(self, request, client_address):
+        """Override SocketServer.handle_error."""
+
+        self._logger.error(
+            'Exception in processing request from: %r\n%s',
+            client_address,
+            util.get_stack_trace())
+        # Note: client_address is a tuple.
+
+    def get_request(self):
+        """Override TCPServer.get_request to wrap the OpenSSL.SSL.Connection
+        object with _StandaloneSSLConnection to provide the makefile method.
+        We cannot substitute OpenSSL.SSL.Connection.makefile since it's a
+        read-only attribute.
+        """
+
+        accepted_socket, client_address = self.socket.accept()
+        if self.websocket_server_options.use_tls and _HAS_OPEN_SSL:
+            accepted_socket = _StandaloneSSLConnection(accepted_socket)
+        return accepted_socket, client_address
+
+    def serve_forever(self, poll_interval=0.5):
+        """Override SocketServer.BaseServer.serve_forever."""
+
+        self.__ws_serving = True
+        self.__ws_is_shut_down.clear()
+        handle_request = self.handle_request
+        if hasattr(self, '_handle_request_noblock'):
+            handle_request = self._handle_request_noblock
+        else:
+            self._logger.warning('Fallback to blocking request handler')
+        try:
+            while self.__ws_serving:
+                r, w, e = select.select(
+                    [socket_[0] for socket_ in self._sockets],
+                    [], [], poll_interval)
+                for socket_ in r:
+                    self.socket = socket_
+                    handle_request()
+                self.socket = None
+        finally:
+            self.__ws_is_shut_down.set()
+
+    def shutdown(self):
+        """Override SocketServer.BaseServer.shutdown."""
+
+        self.__ws_serving = False
+        self.__ws_is_shut_down.wait()
+
+
+class WebSocketRequestHandler(CGIHTTPServer.CGIHTTPRequestHandler):
+    """CGIHTTPRequestHandler specialized for WebSocket."""
+
+    # Use httplib.HTTPMessage instead of mimetools.Message.
+    MessageClass = httplib.HTTPMessage
+
+    def setup(self):
+        """Override SocketServer.StreamRequestHandler.setup to wrap rfile
+        with MemorizingFile.
+
+        This method will be called by BaseRequestHandler's constructor
+        before calling BaseHTTPRequestHandler.handle.
+        BaseHTTPRequestHandler.handle will call
+        BaseHTTPRequestHandler.handle_one_request and it will call
+        WebSocketRequestHandler.parse_request.
+        """
+
+        # Call the superclass's setup to prepare rfile, wfile, etc. See the
+        # setup definition on the root class
+        # SocketServer.StreamRequestHandler to understand what this does.
+        CGIHTTPServer.CGIHTTPRequestHandler.setup(self)
+
+        self.rfile = memorizingfile.MemorizingFile(
+            self.rfile,
+            max_memorized_lines=_MAX_MEMORIZED_LINES)
+
+    def __init__(self, request, client_address, server):
+        self._logger = util.get_class_logger(self)
+
+        self._options = server.websocket_server_options
+
+        # Overrides CGIHTTPRequestHandler.cgi_directories.
+        self.cgi_directories = self._options.cgi_directories
+        # Replace the CGIHTTPRequestHandler.is_executable method.
+        if self._options.is_executable_method is not None:
+            self.is_executable = self._options.is_executable_method
+
+        # This actually calls BaseRequestHandler.__init__.
+        CGIHTTPServer.CGIHTTPRequestHandler.__init__(
+            self, request, client_address, server)
+
+    def parse_request(self):
+        """Override BaseHTTPServer.BaseHTTPRequestHandler.parse_request.
+
+        Return True to continue processing for HTTP(S), False otherwise.
+
+        See the BaseHTTPRequestHandler.handle_one_request method, which
+        calls this method, to understand how the return value will be
+        handled.
+        """
+
+        # We hook the parse_request method, but also call the original
+        # CGIHTTPRequestHandler.parse_request since when we return False,
+        # CGIHTTPRequestHandler.handle_one_request continues processing and
+        # it needs variables set by CGIHTTPRequestHandler.parse_request.
+        #
+        # Variables set by this method will also be used by WebSocket
+        # request handling (self.path, self.command, self.requestline, etc.
+        # See also how _StandaloneRequest's members are implemented using
+        # these attributes).
+        if not CGIHTTPServer.CGIHTTPRequestHandler.parse_request(self):
+            return False
+
+        if self._options.use_basic_auth:
+            auth = self.headers.getheader('Authorization')
+            if auth != self._options.basic_auth_credential:
+                self.send_response(401)
+                self.send_header('WWW-Authenticate',
+                                 'Basic realm="Pywebsocket"')
+                self.end_headers()
+                self._logger.info('Request basic authentication')
+                return True
+
+        host, port, resource = http_header_util.parse_uri(self.path)
+        if resource is None:
+            self._logger.info('Invalid URI: %r', self.path)
+            self._logger.info('Fallback to CGIHTTPRequestHandler')
+            return True
+        server_options = self.server.websocket_server_options
+        if host is not None:
+            validation_host = server_options.validation_host
+            if validation_host is not None and host != validation_host:
+                self._logger.info('Invalid host: %r (expected: %r)',
+                                  host,
+                                  validation_host)
+                self._logger.info('Fallback to CGIHTTPRequestHandler')
+                return True
+        if port is not None:
+            validation_port = server_options.validation_port
+            if validation_port is not None and port != validation_port:
+                self._logger.info('Invalid port: %r (expected: %r)',
+                                  port,
+                                  validation_port)
+                self._logger.info('Fallback to CGIHTTPRequestHandler')
+                return True
+        self.path = resource
+
+        request = _StandaloneRequest(self, self._options.use_tls)
+
+        try:
+            # Fall back to the default HTTP handler for request paths for
+            # which we don't have request handlers.
+            if not self._options.dispatcher.get_handler_suite(self.path):
+                self._logger.info('No handler for resource: %r',
+                                  self.path)
+                self._logger.info('Fallback to CGIHTTPRequestHandler')
+                return True
+        except dispatch.DispatchException, e:
+            self._logger.info('%s', e)
+            self.send_error(e.status)
+            return False
+
+        # If any exception without a corresponding except clause (including
+        # DispatchException) is raised below this point, it will be caught
+        # and logged by WebSocketServer.
+
+        try:
+            try:
+                handshake.do_handshake(
+                    request,
+                    self._options.dispatcher,
+                    allowDraft75=self._options.allow_draft75,
+                    strict=self._options.strict)
+            except handshake.VersionException, e:
+                self._logger.info('%s', e)
+                self.send_response(common.HTTP_STATUS_BAD_REQUEST)
+                self.send_header(common.SEC_WEBSOCKET_VERSION_HEADER,
+                                 e.supported_versions)
+                self.end_headers()
+                return False
+            except handshake.HandshakeException, e:
+                # Handshake for ws(s) failed.
+                self._logger.info('%s', e)
+                self.send_error(e.status)
+                return False
+
+            request._dispatcher = self._options.dispatcher
+            self._options.dispatcher.transfer_data(request)
+        except handshake.AbortedByUserException, e:
+            self._logger.info('%s', e)
+            return False
+
+    def log_request(self, code='-', size='-'):
+        """Override BaseHTTPServer.log_request."""
+
+        self._logger.info('"%s" %s %s',
+                          self.requestline, str(code), str(size))
+
+    def log_error(self, *args):
+        """Override BaseHTTPServer.log_error."""
+
+        # Despite the name, this method is for warnings rather than for
+        # errors. For example, HTTP status codes are logged by this method.
+        self._logger.warning('%s - %s',
+                             self.address_string(),
+                             args[0] % args[1:])
+
+    def is_cgi(self):
+        """Test whether self.path corresponds to a CGI script.
+
+        Adds an extra check that self.path doesn't contain '..'.
+        Also checks whether the file is executable or not.
+        If the file is not executable, it is handled as a static file or
+        directory rather than a CGI script.
+        """
+
+        if CGIHTTPServer.CGIHTTPRequestHandler.is_cgi(self):
+            if '..' in self.path:
+                return False
+            # Strip query parameters from the request path.
+            resource_name = self.path.split('?', 2)[0]
+            # Convert resource_name into a real path name in the filesystem.
+            scriptfile = self.translate_path(resource_name)
+            if not os.path.isfile(scriptfile):
+                return False
+            if not self.is_executable(scriptfile):
+                return False
+            return True
+        return False
+
+
+def _get_logger_from_class(c):
+    return logging.getLogger('%s.%s' % (c.__module__, c.__name__))
+
+
+def _configure_logging(options):
+    logging.addLevelName(common.LOGLEVEL_FINE, 'FINE')
+
+    logger = logging.getLogger()
+    logger.setLevel(logging.getLevelName(options.log_level.upper()))
+    if options.log_file:
+        handler = logging.handlers.RotatingFileHandler(
+            options.log_file, 'a', options.log_max, options.log_count)
+    else:
+        handler = logging.StreamHandler()
+    formatter = logging.Formatter(
+        '[%(asctime)s] [%(levelname)s] %(name)s: %(message)s')
+    handler.setFormatter(formatter)
+    logger.addHandler(handler)
+
+    deflate_log_level_name = logging.getLevelName(
+        options.deflate_log_level.upper())
+    _get_logger_from_class(util._Deflater).setLevel(
+        deflate_log_level_name)
+    _get_logger_from_class(util._Inflater).setLevel(
+        deflate_log_level_name)
+
+
+def _build_option_parser():
+    parser = optparse.OptionParser()
+
+    parser.add_option('--config', dest='config_file', type='string',
+                      default=None,
+                      help=('Path to configuration file. See the file '
+                            'comment at the top of this file for the '
+                            'configuration file format'))
+    parser.add_option('-H', '--server-host', '--server_host',
+                      dest='server_host',
+                      default='',
+                      help='server hostname to listen to')
+    parser.add_option('-V', '--validation-host', '--validation_host',
+                      dest='validation_host',
+                      default=None,
+                      help='server hostname to validate in absolute path.')
+    parser.add_option('-p', '--port', dest='port', type='int',
+                      default=common.DEFAULT_WEB_SOCKET_PORT,
+                      help='port to listen to')
+    parser.add_option('-P', '--validation-port', '--validation_port',
+                      dest='validation_port', type='int',
+                      default=None,
+                      help='server port to validate in absolute path.')
+    parser.add_option('-w', '--websock-handlers', '--websock_handlers',
+                      dest='websock_handlers',
+                      default='.',
+                      help=('The root directory of WebSocket handler files. '
+                            'If the path is relative, --document-root is '
+                            'used as the base.'))
+    parser.add_option('-m', '--websock-handlers-map-file',
+                      '--websock_handlers_map_file',
+                      dest='websock_handlers_map_file',
+                      default=None,
+                      help=('WebSocket handlers map file. '
+                            'Each line consists of alias_resource_path and '
+                            'existing_resource_path, separated by spaces.'))
+    parser.add_option('-s', '--scan-dir', '--scan_dir', dest='scan_dir',
+                      default=None,
+                      help=('Must be a directory under --websock-handlers. '
+                            'Only handlers under this directory are scanned '
+                            'and registered to the server. '
+                            'Useful for saving scan time when the handler '
+                            'root directory contains lots of files that are '
+                            'not handler files, or are handler files that '
+                            'you don\'t want to be registered.'))
+    parser.add_option('--allow-handlers-outside-root-dir',
+                      '--allow_handlers_outside_root_dir',
+                      dest='allow_handlers_outside_root_dir',
+                      action='store_true',
+                      default=False,
+                      help=('Scans WebSocket handlers even if their '
+                            'canonical path is not under '
+                            '--websock-handlers.'))
+    parser.add_option('-d', '--document-root', '--document_root',
+                      dest='document_root', default='.',
+                      help='Document root directory.')
+    parser.add_option('-x', '--cgi-paths', '--cgi_paths', dest='cgi_paths',
+                      default=None,
+                      help=('CGI paths relative to document_root. '
+                            'Comma-separated. (e.g. -x /cgi,/htbin) '
+                            'Files under document_root/cgi_path are handled '
+                            'as CGI programs. Must be executable.'))
+    parser.add_option('-t', '--tls', dest='use_tls', action='store_true',
+                      default=False, help='use TLS (wss://)')
+    parser.add_option('-k', '--private-key', '--private_key',
+                      dest='private_key',
+                      default='', help='TLS private key file.')
+    parser.add_option('-c', '--certificate', dest='certificate',
+                      default='', help='TLS certificate file.')
+    parser.add_option('--tls-client-auth', dest='tls_client_auth',
+                      action='store_true', default=False,
+                      help='Requires TLS client auth on every connection.')
+    parser.add_option('--tls-client-ca', dest='tls_client_ca', default='',
+                      help=('Specifies a pem file which contains a set of '
+                            'concatenated CA certificates which are used to '
+                            'validate certificates passed from clients'))
+    parser.add_option('--basic-auth', dest='use_basic_auth',
+                      action='store_true', default=False,
+                      help='Requires Basic authentication.')
+    parser.add_option('--basic-auth-credential',
+                      dest='basic_auth_credential', default='test:test',
+                      help='Specifies the credential of basic '
+                           'authentication as a username:password pair '
+                           '(e.g. test:test).')
+    parser.add_option('-l', '--log-file', '--log_file', dest='log_file',
+                      default='', help='Log file.')
+    # Custom log level:
+    # - FINE: Prints status of each frame processing step
+    parser.add_option('--log-level', '--log_level', type='choice',
+                      dest='log_level', default='warn',
+                      choices=['fine',
+                               'debug', 'info', 'warning', 'warn', 'error',
+                               'critical'],
+                      help='Log level.')
+    parser.add_option('--deflate-log-level', '--deflate_log_level',
+                      type='choice',
+                      dest='deflate_log_level', default='warn',
+                      choices=['debug', 'info', 'warning', 'warn', 'error',
+                               'critical'],
+                      help='Log level for _Deflater and _Inflater.')
+    parser.add_option('--thread-monitor-interval-in-sec',
+                      '--thread_monitor_interval_in_sec',
+                      dest='thread_monitor_interval_in_sec',
+                      type='int', default=-1,
+                      help=('If a positive integer is specified, run a '
+                            'thread monitor to show the status of server '
+                            'threads periodically at the specified interval '
+                            'in seconds. If a non-positive integer is '
+                            'specified, the thread monitor is disabled.'))
+    parser.add_option('--log-max', '--log_max', dest='log_max', type='int',
+                      default=_DEFAULT_LOG_MAX_BYTES,
+                      help='Log maximum bytes')
+    parser.add_option('--log-count', '--log_count', dest='log_count',
+                      type='int', default=_DEFAULT_LOG_BACKUP_COUNT,
+                      help='Log backup count')
+    parser.add_option('--allow-draft75', dest='allow_draft75',
+                      action='store_true', default=False,
+                      help='Obsolete option. Ignored.')
+    parser.add_option('--strict', dest='strict', action='store_true',
+                      default=False, help='Obsolete option. Ignored.')
+    parser.add_option('-q', '--queue', dest='request_queue_size', type='int',
+                      default=_DEFAULT_REQUEST_QUEUE_SIZE,
+                      help='request queue size')
+
+    return parser
+
+
+class ThreadMonitor(threading.Thread):
+    daemon = True
+
+    def __init__(self, interval_in_sec):
+        threading.Thread.__init__(self, name='ThreadMonitor')
+
+        self._logger = util.get_class_logger(self)
+
+        self._interval_in_sec = interval_in_sec
+
+    def run(self):
+        while True:
+            thread_name_list = []
+            for thread in threading.enumerate():
+                thread_name_list.append(thread.name)
+            self._logger.info(
+                "%d active threads: %s",
+                threading.active_count(),
+                ', '.join(thread_name_list))
+            time.sleep(self._interval_in_sec)
+
+
+def _parse_args_and_config(args):
+    parser = _build_option_parser()
+
+    # First, parse options without the configuration file.
+    temporary_options, temporary_args = parser.parse_args(args=args)
+    if temporary_args:
+        logging.critical(
+            'Unrecognized positional arguments: %r', temporary_args)
+        sys.exit(1)
+
+    if temporary_options.config_file:
+        try:
+            config_fp = open(temporary_options.config_file, 'r')
+        except IOError, e:
+            logging.critical(
+                'Failed to open configuration file %r: %r',
+                temporary_options.config_file,
+                e)
+            sys.exit(1)
+
+        config_parser = ConfigParser.SafeConfigParser()
+        config_parser.readfp(config_fp)
+        config_fp.close()
+
+        args_from_config = []
+        for name, value in config_parser.items('pywebsocket'):
+            args_from_config.append('--' + name)
+            args_from_config.append(value)
+        if args is None:
+            args = args_from_config
+        else:
+            args = args_from_config + args
+        return parser.parse_args(args=args)
+    else:
+        return temporary_options, temporary_args
+
+
+def _main(args=None):
+    """You can call this function from your own program, but please note
+    that this function has some side effects that might affect your program.
+    For example, the use of util.wrap_popen3_for_win in this method replaces
+    the implementation of os.popen3.
+    """
+
+    options, args = _parse_args_and_config(args=args)
+
+    os.chdir(options.document_root)
+
+    _configure_logging(options)
+
+    # TODO(tyoshino): Clean up initialization of CGI related values. Move
+    # some of the code here to the WebSocketRequestHandler class if that is
+    # better.
+    options.cgi_directories = []
+    options.is_executable_method = None
+    if options.cgi_paths:
+        options.cgi_directories = options.cgi_paths.split(',')
+        if sys.platform in ('cygwin', 'win32'):
+            cygwin_path = None
+            # For Win32 Python, it is expected that CYGWIN_PATH
+            # is set to a directory of cygwin binaries.
+            # For example, websocket_server.py in Chromium sets CYGWIN_PATH
+            # to the full path of third_party/cygwin/bin.
+ if 'CYGWIN_PATH' in os.environ: + cygwin_path = os.environ['CYGWIN_PATH'] + util.wrap_popen3_for_win(cygwin_path) + + def __check_script(scriptpath): + return util.get_script_interp(scriptpath, cygwin_path) + + options.is_executable_method = __check_script + + if options.use_tls: + if not (_HAS_SSL or _HAS_OPEN_SSL): + logging.critical('TLS support requires ssl or pyOpenSSL module.') + sys.exit(1) + if not options.private_key or not options.certificate: + logging.critical( + 'To use TLS, specify private_key and certificate.') + sys.exit(1) + + if options.tls_client_auth: + if not options.use_tls: + logging.critical('TLS must be enabled for client authentication.') + sys.exit(1) + if not _HAS_SSL: + logging.critical('Client authentication requires ssl module.') + + if not options.scan_dir: + options.scan_dir = options.websock_handlers + + if options.use_basic_auth: + options.basic_auth_credential = 'Basic ' + base64.b64encode( + options.basic_auth_credential) + + try: + if options.thread_monitor_interval_in_sec > 0: + # Run a thread monitor to show the status of server threads for + # debugging. + ThreadMonitor(options.thread_monitor_interval_in_sec).start() + + server = WebSocketServer(options) + server.serve_forever() + except Exception, e: + logging.critical('mod_pywebsocket: %s' % e) + logging.critical('mod_pywebsocket: %s' % util.get_stack_trace()) + sys.exit(1) + + +if __name__ == '__main__': + _main(sys.argv[1:]) + + +# vi:sts=4 sw=4 et diff --git a/pyload/lib/mod_pywebsocket/stream.py b/pyload/lib/mod_pywebsocket/stream.py new file mode 100644 index 000000000..edc533279 --- /dev/null +++ b/pyload/lib/mod_pywebsocket/stream.py @@ -0,0 +1,57 @@ +# Copyright 2011, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +"""This file exports public symbols. 
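Illustrative usage (not part of the original docstring): test code for a
WebSocket client implementation can build raw frames directly, e.g.
``create_text_frame(u'hello')`` or ``create_ping_frame('ping-me')``, and then
corrupt selected bytes before feeding the result to the implementation under
test.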
+""" + + +from mod_pywebsocket._stream_base import BadOperationException +from mod_pywebsocket._stream_base import ConnectionTerminatedException +from mod_pywebsocket._stream_base import InvalidFrameException +from mod_pywebsocket._stream_base import InvalidUTF8Exception +from mod_pywebsocket._stream_base import UnsupportedFrameException +from mod_pywebsocket._stream_hixie75 import StreamHixie75 +from mod_pywebsocket._stream_hybi import Frame +from mod_pywebsocket._stream_hybi import Stream +from mod_pywebsocket._stream_hybi import StreamOptions + +# These methods are intended to be used by WebSocket client developers to have +# their implementations receive broken data in tests. +from mod_pywebsocket._stream_hybi import create_close_frame +from mod_pywebsocket._stream_hybi import create_header +from mod_pywebsocket._stream_hybi import create_length_header +from mod_pywebsocket._stream_hybi import create_ping_frame +from mod_pywebsocket._stream_hybi import create_pong_frame +from mod_pywebsocket._stream_hybi import create_binary_frame +from mod_pywebsocket._stream_hybi import create_text_frame +from mod_pywebsocket._stream_hybi import create_closing_handshake_body + + +# vi:sts=4 sw=4 et diff --git a/pyload/lib/mod_pywebsocket/util.py b/pyload/lib/mod_pywebsocket/util.py new file mode 100644 index 000000000..7bb0b5d9e --- /dev/null +++ b/pyload/lib/mod_pywebsocket/util.py @@ -0,0 +1,515 @@ +# Copyright 2011, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +"""WebSocket utilities. +""" + + +import array +import errno + +# Import hash classes from a module available and recommended for each Python +# version and re-export those symbol. Use sha and md5 module in Python 2.4, and +# hashlib module in Python 2.6. 
+try: + import hashlib + md5_hash = hashlib.md5 + sha1_hash = hashlib.sha1 +except ImportError: + import md5 + import sha + md5_hash = md5.md5 + sha1_hash = sha.sha + +import StringIO +import logging +import os +import re +import socket +import traceback +import zlib + + +def get_stack_trace(): + """Get the current stack trace as string. + + This is needed to support Python 2.3. + TODO: Remove this when we only support Python 2.4 and above. + Use traceback.format_exc instead. + """ + + out = StringIO.StringIO() + traceback.print_exc(file=out) + return out.getvalue() + + +def prepend_message_to_exception(message, exc): + """Prepend message to the exception.""" + + exc.args = (message + str(exc),) + return + + +def __translate_interp(interp, cygwin_path): + """Translate interp program path for Win32 python to run cygwin program + (e.g. perl). Note that it doesn't support path that contains space, + which is typically true for Unix, where #!-script is written. + For Win32 python, cygwin_path is a directory of cygwin binaries. + + Args: + interp: interp command line + cygwin_path: directory name of cygwin binary, or None + Returns: + translated interp command line. + """ + if not cygwin_path: + return interp + m = re.match('^[^ ]*/([^ ]+)( .*)?', interp) + if m: + cmd = os.path.join(cygwin_path, m.group(1)) + return cmd + m.group(2) + return interp + + +def get_script_interp(script_path, cygwin_path=None): + """Gets #!-interpreter command line from the script. + + It also fixes command path. When Cygwin Python is used, e.g. in WebKit, + it could run "/usr/bin/perl -wT hello.pl". + When Win32 Python is used, e.g. in Chromium, it couldn't. So, fix + "/usr/bin/perl" to "\perl.exe". + + Args: + script_path: pathname of the script + cygwin_path: directory name of cygwin binary, or None + Returns: + #!-interpreter command line, or None if it is not #!-script. + """ + fp = open(script_path) + line = fp.readline() + fp.close() + m = re.match('^#!(.*)', line) + if m: + return __translate_interp(m.group(1), cygwin_path) + return None + + +def wrap_popen3_for_win(cygwin_path): + """Wrap popen3 to support #!-script on Windows. + + Args: + cygwin_path: path for cygwin binary if command path is needed to be + translated. None if no translation required. + """ + + __orig_popen3 = os.popen3 + + def __wrap_popen3(cmd, mode='t', bufsize=-1): + cmdline = cmd.split(' ') + interp = get_script_interp(cmdline[0], cygwin_path) + if interp: + cmd = interp + ' ' + cmd + return __orig_popen3(cmd, mode, bufsize) + + os.popen3 = __wrap_popen3 + + +def hexify(s): + return ' '.join(map(lambda x: '%02x' % ord(x), s)) + + +def get_class_logger(o): + return logging.getLogger( + '%s.%s' % (o.__class__.__module__, o.__class__.__name__)) + + +class NoopMasker(object): + """A masking object that has the same interface as RepeatedXorMasker but + just returns the string passed in without making any change. + """ + + def __init__(self): + pass + + def mask(self, s): + return s + + +class RepeatedXorMasker(object): + """A masking object that applies XOR on the string given to mask method + with the masking bytes given to the constructor repeatedly. This object + remembers the position in the masking bytes the last mask method call + ended and resumes from that point on the next mask method call. 
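    Illustrative example (not part of the original docstring): the key
    stream continues across calls, and masking is its own inverse once the
    masker is restarted:

      masker = RepeatedXorMasker('\x11\x22\x33\x44')
      masked = masker.mask('ab') + masker.mask('cd')  # 'cd' uses key bytes 2-3
      RepeatedXorMasker('\x11\x22\x33\x44').mask(masked)  # -> 'abcd'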
+ """ + + def __init__(self, mask): + self._mask = map(ord, mask) + self._mask_size = len(self._mask) + self._count = 0 + + def mask(self, s): + result = array.array('B') + result.fromstring(s) + # Use temporary local variables to eliminate the cost to access + # attributes + count = self._count + mask = self._mask + mask_size = self._mask_size + for i in xrange(len(result)): + result[i] ^= mask[count] + count = (count + 1) % mask_size + self._count = count + + return result.tostring() + + +class DeflateRequest(object): + """A wrapper class for request object to intercept send and recv to perform + deflate compression and decompression transparently. + """ + + def __init__(self, request): + self._request = request + self.connection = DeflateConnection(request.connection) + + def __getattribute__(self, name): + if name in ('_request', 'connection'): + return object.__getattribute__(self, name) + return self._request.__getattribute__(name) + + def __setattr__(self, name, value): + if name in ('_request', 'connection'): + return object.__setattr__(self, name, value) + return self._request.__setattr__(name, value) + + +# By making wbits option negative, we can suppress CMF/FLG (2 octet) and +# ADLER32 (4 octet) fields of zlib so that we can use zlib module just as +# deflate library. DICTID won't be added as far as we don't set dictionary. +# LZ77 window of 32K will be used for both compression and decompression. +# For decompression, we can just use 32K to cover any windows size. For +# compression, we use 32K so receivers must use 32K. +# +# Compression level is Z_DEFAULT_COMPRESSION. We don't have to match level +# to decode. +# +# See zconf.h, deflate.cc, inflate.cc of zlib library, and zlibmodule.c of +# Python. See also RFC1950 (ZLIB 3.3). + + +class _Deflater(object): + + def __init__(self, window_bits): + self._logger = get_class_logger(self) + + self._compress = zlib.compressobj( + zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -window_bits) + + def compress(self, bytes): + compressed_bytes = self._compress.compress(bytes) + self._logger.debug('Compress input %r', bytes) + self._logger.debug('Compress result %r', compressed_bytes) + return compressed_bytes + + def compress_and_flush(self, bytes): + compressed_bytes = self._compress.compress(bytes) + compressed_bytes += self._compress.flush(zlib.Z_SYNC_FLUSH) + self._logger.debug('Compress input %r', bytes) + self._logger.debug('Compress result %r', compressed_bytes) + return compressed_bytes + + def compress_and_finish(self, bytes): + compressed_bytes = self._compress.compress(bytes) + compressed_bytes += self._compress.flush(zlib.Z_FINISH) + self._logger.debug('Compress input %r', bytes) + self._logger.debug('Compress result %r', compressed_bytes) + return compressed_bytes + +class _Inflater(object): + + def __init__(self): + self._logger = get_class_logger(self) + + self._unconsumed = '' + + self.reset() + + def decompress(self, size): + if not (size == -1 or size > 0): + raise Exception('size must be -1 or positive') + + data = '' + + while True: + if size == -1: + data += self._decompress.decompress(self._unconsumed) + # See Python bug http://bugs.python.org/issue12050 to + # understand why the same code cannot be used for updating + # self._unconsumed for here and else block. + self._unconsumed = '' + else: + data += self._decompress.decompress( + self._unconsumed, size - len(data)) + self._unconsumed = self._decompress.unconsumed_tail + if self._decompress.unused_data: + # Encountered a last block (i.e. 
a block with BFINAL = 1) and + # found a new stream (unused_data). We cannot use the same + # zlib.Decompress object for the new stream. Create a new + # Decompress object to decompress the new one. + # + # It's fine to ignore unconsumed_tail if unused_data is not + # empty. + self._unconsumed = self._decompress.unused_data + self.reset() + if size >= 0 and len(data) == size: + # data is filled. Don't call decompress again. + break + else: + # Re-invoke Decompress.decompress to try to decompress all + # available bytes before invoking read which blocks until + # any new byte is available. + continue + else: + # Here, since unused_data is empty, even if unconsumed_tail is + # not empty, bytes of requested length are already in data. We + # don't have to "continue" here. + break + + if data: + self._logger.debug('Decompressed %r', data) + return data + + def append(self, data): + self._logger.debug('Appended %r', data) + self._unconsumed += data + + def reset(self): + self._logger.debug('Reset') + self._decompress = zlib.decompressobj(-zlib.MAX_WBITS) + + +# Compresses/decompresses given octets using the method introduced in RFC1979. + + +class _RFC1979Deflater(object): + """A compressor class that applies DEFLATE to given byte sequence and + flushes using the algorithm described in the RFC1979 section 2.1. + """ + + def __init__(self, window_bits, no_context_takeover): + self._deflater = None + if window_bits is None: + window_bits = zlib.MAX_WBITS + self._window_bits = window_bits + self._no_context_takeover = no_context_takeover + + def filter(self, bytes, flush=True, bfinal=False): + if self._deflater is None or (self._no_context_takeover and flush): + self._deflater = _Deflater(self._window_bits) + + if bfinal: + result = self._deflater.compress_and_finish(bytes) + # Add a padding block with BFINAL = 0 and BTYPE = 0. + result = result + chr(0) + self._deflater = None + return result + if flush: + # Strip last 4 octets which is LEN and NLEN field of a + # non-compressed block added for Z_SYNC_FLUSH. + return self._deflater.compress_and_flush(bytes)[:-4] + return self._deflater.compress(bytes) + +class _RFC1979Inflater(object): + """A decompressor class for byte sequence compressed and flushed following + the algorithm described in the RFC1979 section 2.1. + """ + + def __init__(self): + self._inflater = _Inflater() + + def filter(self, bytes): + # Restore stripped LEN and NLEN field of a non-compressed block added + # for Z_SYNC_FLUSH. + self._inflater.append(bytes + '\x00\x00\xff\xff') + return self._inflater.decompress(-1) + + +class DeflateSocket(object): + """A wrapper class for socket object to intercept send and recv to perform + deflate compression and decompression transparently. + """ + + # Size of the buffer passed to recv to receive compressed data. + _RECV_SIZE = 4096 + + def __init__(self, socket): + self._socket = socket + + self._logger = get_class_logger(self) + + self._deflater = _Deflater(zlib.MAX_WBITS) + self._inflater = _Inflater() + + def recv(self, size): + """Receives data from the socket specified on the construction up + to the specified size. Once any data is available, returns it even + if it's smaller than the specified size. + """ + + # TODO(tyoshino): Allow call with size=0. It should block until any + # decompressed data is available. 
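        # (Illustrative note, not part of the original: the loop below first
        # drains bytes already buffered in the inflater and only reads more
        # compressed input from the wrapped socket when no decompressed data
        # is available; an empty socket read means the peer closed the
        # connection and is surfaced as ''.)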
+ if size <= 0: + raise Exception('Non-positive size passed') + while True: + data = self._inflater.decompress(size) + if len(data) != 0: + return data + + read_data = self._socket.recv(DeflateSocket._RECV_SIZE) + if not read_data: + return '' + self._inflater.append(read_data) + + def sendall(self, bytes): + self.send(bytes) + + def send(self, bytes): + self._socket.sendall(self._deflater.compress_and_flush(bytes)) + return len(bytes) + + +class DeflateConnection(object): + """A wrapper class for request object to intercept write and read to + perform deflate compression and decompression transparently. + """ + + def __init__(self, connection): + self._connection = connection + + self._logger = get_class_logger(self) + + self._deflater = _Deflater(zlib.MAX_WBITS) + self._inflater = _Inflater() + + def get_remote_addr(self): + return self._connection.remote_addr + remote_addr = property(get_remote_addr) + + def put_bytes(self, bytes): + self.write(bytes) + + def read(self, size=-1): + """Reads at most size bytes. Blocks until there's at least one byte + available. + """ + + # TODO(tyoshino): Allow call with size=0. + if not (size == -1 or size > 0): + raise Exception('size must be -1 or positive') + + data = '' + while True: + if size == -1: + data += self._inflater.decompress(-1) + else: + data += self._inflater.decompress(size - len(data)) + + if size >= 0 and len(data) != 0: + break + + # TODO(tyoshino): Make this read efficient by some workaround. + # + # In 3.0.3 and prior of mod_python, read blocks until length bytes + # was read. We don't know the exact size to read while using + # deflate, so read byte-by-byte. + # + # _StandaloneRequest.read that ultimately performs + # socket._fileobject.read also blocks until length bytes was read + read_data = self._connection.read(1) + if not read_data: + break + self._inflater.append(read_data) + return data + + def write(self, bytes): + self._connection.write(self._deflater.compress_and_flush(bytes)) + + +def _is_ewouldblock_errno(error_number): + """Returns True iff error_number indicates that receive operation would + block. To make this portable, we check availability of errno and then + compare them. + """ + + for error_name in ['WSAEWOULDBLOCK', 'EWOULDBLOCK', 'EAGAIN']: + if (error_name in dir(errno) and + error_number == getattr(errno, error_name)): + return True + return False + + +def drain_received_data(raw_socket): + # Set the socket non-blocking. + original_timeout = raw_socket.gettimeout() + raw_socket.settimeout(0.0) + + drained_data = [] + + # Drain until the socket is closed or no data is immediately + # available for read. + while True: + try: + data = raw_socket.recv(1) + if not data: + break + drained_data.append(data) + except socket.error, e: + # e can be either a pair (errno, string) or just a string (or + # something else) telling what went wrong. We suppress only + # the errors that indicates that the socket blocks. Those + # exceptions can be parsed as a pair (errno, string). + try: + error_number, message = e + except: + # Failed to parse socket.error. + raise e + + if _is_ewouldblock_errno(error_number): + break + else: + raise e + + # Rollback timeout value. 
+ raw_socket.settimeout(original_timeout) + + return ''.join(drained_data) + + +# vi:sts=4 sw=4 et diff --git a/pyload/lib/new_collections.py b/pyload/lib/new_collections.py new file mode 100644 index 000000000..12d05b4b9 --- /dev/null +++ b/pyload/lib/new_collections.py @@ -0,0 +1,375 @@ +## {{{ http://code.activestate.com/recipes/576693/ (r9) +# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. +# Passes Python2.7's test suite and incorporates all the latest updates. + +try: + from thread import get_ident as _get_ident +except ImportError: + from dummy_thread import get_ident as _get_ident + +try: + from _abcoll import KeysView, ValuesView, ItemsView +except ImportError: + pass + + +class OrderedDict(dict): + 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as for regular dictionaries. + + # The internal self.__map dictionary maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # Each link is stored as a list of length three: [PREV, NEXT, KEY]. + + def __init__(self, *args, **kwds): + '''Initialize an ordered dictionary. Signature is the same as for + regular dictionaries, but keyword arguments are not recommended + because their insertion order is arbitrary. + + ''' + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__root = root = [] # sentinel node + root[:] = [root, root, None] + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, dict_setitem=dict.__setitem__): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link which goes at the end of the linked + # list, and the inherited dictionary is updated with the new key/value pair. + if key not in self: + root = self.__root + last = root[0] + last[1] = root[0] = self.__map[key] = [last, root, key] + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which is + # then removed by updating the links in the predecessor and successor nodes. + dict_delitem(self, key) + link_prev, link_next, key = self.__map.pop(key) + link_prev[1] = link_next + link_next[0] = link_prev + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + root = self.__root + curr = root[1] + while curr is not root: + yield curr[2] + curr = curr[1] + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + root = self.__root + curr = root[0] + while curr is not root: + yield curr[2] + curr = curr[0] + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + try: + for node in self.__map.itervalues(): + del node[:] + root = self.__root + root[:] = [root, root, None] + self.__map.clear() + except AttributeError: + pass + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. 
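        Illustrative example (not part of the original recipe):

        >>> od = OrderedDict([('a', 1), ('b', 2), ('c', 3)])
        >>> od.popitem()
        ('c', 3)
        >>> od.popitem(last=False)
        ('a', 1)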
+ + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root[0] + link_prev = link[0] + link_prev[1] = root + root[0] = link_prev + else: + link = root[1] + link_next = link[1] + root[1] = link_next + link_next[0] = root + key = link[2] + del self.__map[key] + value = dict.pop(self, key) + return key, value + + # -- the following methods do not depend on the internal structure -- + + def keys(self): + 'od.keys() -> list of keys in od' + return list(self) + + def values(self): + 'od.values() -> list of values in od' + return [self[key] for key in self] + + def items(self): + 'od.items() -> list of (key, value) pairs in od' + return [(key, self[key]) for key in self] + + def iterkeys(self): + 'od.iterkeys() -> an iterator over the keys in od' + return iter(self) + + def itervalues(self): + 'od.itervalues -> an iterator over the values in od' + for k in self: + yield self[k] + + def iteritems(self): + 'od.iteritems -> an iterator over the (key, value) items in od' + for k in self: + yield (k, self[k]) + + def update(*args, **kwds): + '''od.update(E, **F) -> None. Update od from dict/iterable E and F. + + If E is a dict instance, does: for k in E: od[k] = E[k] + If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] + Or if E is an iterable of items, does: for k, v in E: od[k] = v + In either case, this is followed by: for k, v in F.items(): od[k] = v + + ''' + if len(args) > 2: + raise TypeError('update() takes at most 2 positional ' + 'arguments (%d given)' % (len(args),)) + elif not args: + raise TypeError('update() takes at least 1 argument (0 given)') + self = args[0] + # Make progressively weaker assumptions about "other" + other = () + if len(args) == 2: + other = args[1] + if isinstance(other, dict): + for key in other: + self[key] = other[key] + elif hasattr(other, 'keys'): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + __update = update # let subclasses override update without breaking __init__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. + + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + def __repr__(self, _repr_running={}): + 'od.__repr__() <==> repr(od)' + call_key = id(self), _get_ident() + if call_key in _repr_running: + return '...' 
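        # (Illustrative note, not part of the original: _repr_running keys an
        # in-progress repr by (id(self), thread id) so that an OrderedDict
        # that contains itself prints as '...' instead of recursing forever.)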
+ _repr_running[call_key] = 1 + try: + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, self.items()) + finally: + del _repr_running[call_key] + + def __reduce__(self): + 'Return state information for pickling' + items = [[k, self[k]] for k in self] + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + if inst_dict: + return (self.__class__, (items,), inst_dict) + return self.__class__, (items,) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S + and values equal to v (which defaults to None). + + ''' + d = cls() + for key in iterable: + d[key] = value + return d + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. + + ''' + if isinstance(other, OrderedDict): + return len(self)==len(other) and self.items() == other.items() + return dict.__eq__(self, other) + + def __ne__(self, other): + return not self == other + + # -- the following methods are only used in Python 2.7 -- + + def viewkeys(self): + "od.viewkeys() -> a set-like object providing a view on od's keys" + return KeysView(self) + + def viewvalues(self): + "od.viewvalues() -> an object providing a view on od's values" + return ValuesView(self) + + def viewitems(self): + "od.viewitems() -> a set-like object providing a view on od's items" + return ItemsView(self) +## end of http://code.activestate.com/recipes/576693/ }}} + +## {{{ http://code.activestate.com/recipes/500261/ (r15) +from operator import itemgetter as _itemgetter +from keyword import iskeyword as _iskeyword +import sys as _sys + +def namedtuple(typename, field_names, verbose=False, rename=False): + """Returns a new subclass of tuple with named fields. + + >>> Point = namedtuple('Point', 'x y') + >>> Point.__doc__ # docstring for the new class + 'Point(x, y)' + >>> p = Point(11, y=22) # instantiate with positional args or keywords + >>> p[0] + p[1] # indexable like a plain tuple + 33 + >>> x, y = p # unpack like a regular tuple + >>> x, y + (11, 22) + >>> p.x + p.y # fields also accessable by name + 33 + >>> d = p._asdict() # convert to a dictionary + >>> d['x'] + 11 + >>> Point(**d) # convert from a dictionary + Point(x=11, y=22) + >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields + Point(x=100, y=22) + + """ + + # Parse and validate the field names. Validation serves two purposes, + # generating informative error messages and preventing template injection attacks. 
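    # (Illustrative example, not part of the recipe: with rename=True,
    # invalid or duplicate names are replaced positionally, e.g.
    #   namedtuple('T', 'abc def abc', rename=True)._fields
    # evaluates to ('abc', '_1', '_2'), since 'def' is a keyword and the
    # second 'abc' is a duplicate.)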
+ if isinstance(field_names, basestring): + field_names = field_names.replace(',', ' ').split() # names separated by whitespace and/or commas + field_names = tuple(map(str, field_names)) + if rename: + names = list(field_names) + seen = set() + for i, name in enumerate(names): + if (not min(c.isalnum() or c=='_' for c in name) or _iskeyword(name) + or not name or name[0].isdigit() or name.startswith('_') + or name in seen): + names[i] = '_%d' % i + seen.add(name) + field_names = tuple(names) + for name in (typename,) + field_names: + if not min(c.isalnum() or c=='_' for c in name): + raise ValueError('Type names and field names can only contain alphanumeric characters and underscores: %r' % name) + if _iskeyword(name): + raise ValueError('Type names and field names cannot be a keyword: %r' % name) + if name[0].isdigit(): + raise ValueError('Type names and field names cannot start with a number: %r' % name) + seen_names = set() + for name in field_names: + if name.startswith('_') and not rename: + raise ValueError('Field names cannot start with an underscore: %r' % name) + if name in seen_names: + raise ValueError('Encountered duplicate field name: %r' % name) + seen_names.add(name) + + # Create and fill-in the class template + numfields = len(field_names) + argtxt = repr(field_names).replace("'", "")[1:-1] # tuple repr without parens or quotes + reprtxt = ', '.join('%s=%%r' % name for name in field_names) + template = '''class %(typename)s(tuple): + '%(typename)s(%(argtxt)s)' \n + __slots__ = () \n + _fields = %(field_names)r \n + def __new__(_cls, %(argtxt)s): + return _tuple.__new__(_cls, (%(argtxt)s)) \n + @classmethod + def _make(cls, iterable, new=tuple.__new__, len=len): + 'Make a new %(typename)s object from a sequence or iterable' + result = new(cls, iterable) + if len(result) != %(numfields)d: + raise TypeError('Expected %(numfields)d arguments, got %%d' %% len(result)) + return result \n + def __repr__(self): + return '%(typename)s(%(reprtxt)s)' %% self \n + def _asdict(self): + 'Return a new dict which maps field names to their values' + return dict(zip(self._fields, self)) \n + def _replace(_self, **kwds): + 'Return a new %(typename)s object replacing specified fields with new values' + result = _self._make(map(kwds.pop, %(field_names)r, _self)) + if kwds: + raise ValueError('Got unexpected field names: %%r' %% kwds.keys()) + return result \n + def __getnewargs__(self): + return tuple(self) \n\n''' % locals() + for i, name in enumerate(field_names): + template += ' %s = _property(_itemgetter(%d))\n' % (name, i) + if verbose: + print template + + # Execute the template string in a temporary namespace + namespace = dict(_itemgetter=_itemgetter, __name__='namedtuple_%s' % typename, + _property=property, _tuple=tuple) + try: + exec template in namespace + except SyntaxError, e: + raise SyntaxError(e.message + ':\n' + template) + result = namespace[typename] + + # For pickling to work, the __module__ variable needs to be set to the frame + # where the named tuple is created. Bypass this step in enviroments where + # sys._getframe is not defined (Jython for example) or sys._getframe is not + # defined for arguments greater than 0 (IronPython). 
+ try: + result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + + return result +## end of http://code.activestate.com/recipes/500261/ }}} diff --git a/pyload/lib/rename_process.py b/pyload/lib/rename_process.py new file mode 100644 index 000000000..2527cef39 --- /dev/null +++ b/pyload/lib/rename_process.py @@ -0,0 +1,14 @@ +import sys + +def renameProcess(new_name): + """ Renames the process calling the function to the given name. """ + if sys.platform != 'linux2': + return False + try: + from ctypes import CDLL + libc = CDLL('libc.so.6') + libc.prctl(15, new_name, 0, 0, 0) + return True + except Exception, e: + #print "Rename process failed", e + return False diff --git a/pyload/lib/simplejson/__init__.py b/pyload/lib/simplejson/__init__.py new file mode 100644 index 000000000..ef5c0db48 --- /dev/null +++ b/pyload/lib/simplejson/__init__.py @@ -0,0 +1,466 @@ +r"""JSON (JavaScript Object Notation) is a subset of +JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data +interchange format. + +:mod:`simplejson` exposes an API familiar to users of the standard library +:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained +version of the :mod:`json` library contained in Python 2.6, but maintains +compatibility with Python 2.4 and Python 2.5 and (currently) has +significant performance advantages, even without using the optional C +extension for speedups. + +Encoding basic Python object hierarchies:: + + >>> import simplejson as json + >>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}]) + '["foo", {"bar": ["baz", null, 1.0, 2]}]' + >>> print json.dumps("\"foo\bar") + "\"foo\bar" + >>> print json.dumps(u'\u1234') + "\u1234" + >>> print json.dumps('\\') + "\\" + >>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True) + {"a": 0, "b": 0, "c": 0} + >>> from StringIO import StringIO + >>> io = StringIO() + >>> json.dump(['streaming API'], io) + >>> io.getvalue() + '["streaming API"]' + +Compact encoding:: + + >>> import simplejson as json + >>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':')) + '[1,2,3,{"4":5,"6":7}]' + +Pretty printing:: + + >>> import simplejson as json + >>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=' ') + >>> print '\n'.join([l.rstrip() for l in s.splitlines()]) + { + "4": 5, + "6": 7 + } + +Decoding JSON:: + + >>> import simplejson as json + >>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}] + >>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj + True + >>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar' + True + >>> from StringIO import StringIO + >>> io = StringIO('["streaming API"]') + >>> json.load(io)[0] == 'streaming API' + True + +Specializing JSON object decoding:: + + >>> import simplejson as json + >>> def as_complex(dct): + ... if '__complex__' in dct: + ... return complex(dct['real'], dct['imag']) + ... return dct + ... + >>> json.loads('{"__complex__": true, "real": 1, "imag": 2}', + ... object_hook=as_complex) + (1+2j) + >>> from decimal import Decimal + >>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1') + True + +Specializing JSON object encoding:: + + >>> import simplejson as json + >>> def encode_complex(obj): + ... if isinstance(obj, complex): + ... return [obj.real, obj.imag] + ... raise TypeError(repr(o) + " is not JSON serializable") + ... 
+ >>> json.dumps(2 + 1j, default=encode_complex) + '[2.0, 1.0]' + >>> json.JSONEncoder(default=encode_complex).encode(2 + 1j) + '[2.0, 1.0]' + >>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j)) + '[2.0, 1.0]' + + +Using simplejson.tool from the shell to validate and pretty-print:: + + $ echo '{"json":"obj"}' | python -m simplejson.tool + { + "json": "obj" + } + $ echo '{ 1.2:3.4}' | python -m simplejson.tool + Expecting property name: line 1 column 2 (char 2) +""" +__version__ = '2.2.1' +__all__ = [ + 'dump', 'dumps', 'load', 'loads', + 'JSONDecoder', 'JSONDecodeError', 'JSONEncoder', + 'OrderedDict', +] + +__author__ = 'Bob Ippolito ' + +from decimal import Decimal + +from decoder import JSONDecoder, JSONDecodeError +from encoder import JSONEncoder +def _import_OrderedDict(): + import collections + try: + return collections.OrderedDict + except AttributeError: + import ordered_dict + return ordered_dict.OrderedDict +OrderedDict = _import_OrderedDict() + +def _import_c_make_encoder(): + try: + from simplejson._speedups import make_encoder + return make_encoder + except ImportError: + return None + +_default_encoder = JSONEncoder( + skipkeys=False, + ensure_ascii=True, + check_circular=True, + allow_nan=True, + indent=None, + separators=None, + encoding='utf-8', + default=None, + use_decimal=True, + namedtuple_as_object=True, + tuple_as_array=True, +) + +def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, + allow_nan=True, cls=None, indent=None, separators=None, + encoding='utf-8', default=None, use_decimal=True, + namedtuple_as_object=True, tuple_as_array=True, + **kw): + """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a + ``.write()``-supporting file-like object). + + If ``skipkeys`` is true then ``dict`` keys that are not basic types + (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``) + will be skipped instead of raising a ``TypeError``. + + If ``ensure_ascii`` is false, then the some chunks written to ``fp`` + may be ``unicode`` instances, subject to normal Python ``str`` to + ``unicode`` coercion rules. Unless ``fp.write()`` explicitly + understands ``unicode`` (as in ``codecs.getwriter()``) this is likely + to cause an error. + + If ``check_circular`` is false, then the circular reference check + for container types will be skipped and a circular reference will + result in an ``OverflowError`` (or worse). + + If ``allow_nan`` is false, then it will be a ``ValueError`` to + serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) + in strict compliance of the JSON specification, instead of using the + JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). + + If *indent* is a string, then JSON array elements and object members + will be pretty-printed with a newline followed by that string repeated + for each level of nesting. ``None`` (the default) selects the most compact + representation without any newlines. For backwards compatibility with + versions of simplejson earlier than 2.1.0, an integer is also accepted + and is converted to a string with that many spaces. + + If ``separators`` is an ``(item_separator, dict_separator)`` tuple + then it will be used instead of the default ``(', ', ': ')`` separators. + ``(',', ':')`` is the most compact JSON representation. + + ``encoding`` is the character encoding for str instances, default is UTF-8. + + ``default(obj)`` is a function that should return a serializable version + of obj or raise TypeError. The default simply raises TypeError. 
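    For example (an illustrative sketch, not part of the original docstring),
    ``default`` can serialize otherwise unsupported types::

        >>> import simplejson as json
        >>> from StringIO import StringIO
        >>> io = StringIO()
        >>> json.dump({'nums': set([1])}, io, default=list)
        >>> io.getvalue()
        '{"nums": [1]}'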
+ + If *use_decimal* is true (default: ``True``) then decimal.Decimal + will be natively serialized to JSON with full precision. + + If *namedtuple_as_object* is true (default: ``True``), + :class:`tuple` subclasses with ``_asdict()`` methods will be encoded + as JSON objects. + + If *tuple_as_array* is true (default: ``True``), + :class:`tuple` (and subclasses) will be encoded as JSON arrays. + + To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the + ``.default()`` method to serialize additional types), specify it with + the ``cls`` kwarg. + + """ + # cached encoder + if (not skipkeys and ensure_ascii and + check_circular and allow_nan and + cls is None and indent is None and separators is None and + encoding == 'utf-8' and default is None and use_decimal + and namedtuple_as_object and tuple_as_array and not kw): + iterable = _default_encoder.iterencode(obj) + else: + if cls is None: + cls = JSONEncoder + iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii, + check_circular=check_circular, allow_nan=allow_nan, indent=indent, + separators=separators, encoding=encoding, + default=default, use_decimal=use_decimal, + namedtuple_as_object=namedtuple_as_object, + tuple_as_array=tuple_as_array, + **kw).iterencode(obj) + # could accelerate with writelines in some versions of Python, at + # a debuggability cost + for chunk in iterable: + fp.write(chunk) + + +def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, + allow_nan=True, cls=None, indent=None, separators=None, + encoding='utf-8', default=None, use_decimal=True, + namedtuple_as_object=True, + tuple_as_array=True, + **kw): + """Serialize ``obj`` to a JSON formatted ``str``. + + If ``skipkeys`` is false then ``dict`` keys that are not basic types + (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``) + will be skipped instead of raising a ``TypeError``. + + If ``ensure_ascii`` is false, then the return value will be a + ``unicode`` instance subject to normal Python ``str`` to ``unicode`` + coercion rules instead of being escaped to an ASCII ``str``. + + If ``check_circular`` is false, then the circular reference check + for container types will be skipped and a circular reference will + result in an ``OverflowError`` (or worse). + + If ``allow_nan`` is false, then it will be a ``ValueError`` to + serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in + strict compliance of the JSON specification, instead of using the + JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). + + If ``indent`` is a string, then JSON array elements and object members + will be pretty-printed with a newline followed by that string repeated + for each level of nesting. ``None`` (the default) selects the most compact + representation without any newlines. For backwards compatibility with + versions of simplejson earlier than 2.1.0, an integer is also accepted + and is converted to a string with that many spaces. + + If ``separators`` is an ``(item_separator, dict_separator)`` tuple + then it will be used instead of the default ``(', ', ': ')`` separators. + ``(',', ':')`` is the most compact JSON representation. + + ``encoding`` is the character encoding for str instances, default is UTF-8. + + ``default(obj)`` is a function that should return a serializable version + of obj or raise TypeError. The default simply raises TypeError. + + If *use_decimal* is true (default: ``True``) then decimal.Decimal + will be natively serialized to JSON with full precision. 
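    For example (illustrative, not part of the original docstring)::

        >>> import simplejson as json
        >>> from decimal import Decimal
        >>> json.dumps(Decimal('1.10'))
        '1.10'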
+ + If *namedtuple_as_object* is true (default: ``True``), + :class:`tuple` subclasses with ``_asdict()`` methods will be encoded + as JSON objects. + + If *tuple_as_array* is true (default: ``True``), + :class:`tuple` (and subclasses) will be encoded as JSON arrays. + + To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the + ``.default()`` method to serialize additional types), specify it with + the ``cls`` kwarg. + + """ + # cached encoder + if (not skipkeys and ensure_ascii and + check_circular and allow_nan and + cls is None and indent is None and separators is None and + encoding == 'utf-8' and default is None and use_decimal + and namedtuple_as_object and tuple_as_array and not kw): + return _default_encoder.encode(obj) + if cls is None: + cls = JSONEncoder + return cls( + skipkeys=skipkeys, ensure_ascii=ensure_ascii, + check_circular=check_circular, allow_nan=allow_nan, indent=indent, + separators=separators, encoding=encoding, default=default, + use_decimal=use_decimal, + namedtuple_as_object=namedtuple_as_object, + tuple_as_array=tuple_as_array, + **kw).encode(obj) + + +_default_decoder = JSONDecoder(encoding=None, object_hook=None, + object_pairs_hook=None) + + +def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None, + parse_int=None, parse_constant=None, object_pairs_hook=None, + use_decimal=False, namedtuple_as_object=True, tuple_as_array=True, + **kw): + """Deserialize ``fp`` (a ``.read()``-supporting file-like object containing + a JSON document) to a Python object. + + *encoding* determines the encoding used to interpret any + :class:`str` objects decoded by this instance (``'utf-8'`` by + default). It has no effect when decoding :class:`unicode` objects. + + Note that currently only encodings that are a superset of ASCII work, + strings of other encodings should be passed in as :class:`unicode`. + + *object_hook*, if specified, will be called with the result of every + JSON object decoded and its return value will be used in place of the + given :class:`dict`. This can be used to provide custom + deserializations (e.g. to support JSON-RPC class hinting). + + *object_pairs_hook* is an optional function that will be called with + the result of any object literal decode with an ordered list of pairs. + The return value of *object_pairs_hook* will be used instead of the + :class:`dict`. This feature can be used to implement custom decoders + that rely on the order that the key and value pairs are decoded (for + example, :func:`collections.OrderedDict` will remember the order of + insertion). If *object_hook* is also defined, the *object_pairs_hook* + takes priority. + + *parse_float*, if specified, will be called with the string of every + JSON float to be decoded. By default, this is equivalent to + ``float(num_str)``. This can be used to use another datatype or parser + for JSON floats (e.g. :class:`decimal.Decimal`). + + *parse_int*, if specified, will be called with the string of every + JSON int to be decoded. By default, this is equivalent to + ``int(num_str)``. This can be used to use another datatype or parser + for JSON integers (e.g. :class:`float`). + + *parse_constant*, if specified, will be called with one of the + following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This + can be used to raise an exception if invalid JSON numbers are + encountered. + + If *use_decimal* is true (default: ``False``) then it implies + parse_float=decimal.Decimal for parity with ``dump``. 
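    For example (illustrative, not part of the original docstring)::

        >>> import simplejson as json
        >>> from StringIO import StringIO
        >>> json.load(StringIO('1.1'), use_decimal=True)
        Decimal('1.1')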
+ + To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` + kwarg. + + """ + return loads(fp.read(), + encoding=encoding, cls=cls, object_hook=object_hook, + parse_float=parse_float, parse_int=parse_int, + parse_constant=parse_constant, object_pairs_hook=object_pairs_hook, + use_decimal=use_decimal, **kw) + + +def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None, + parse_int=None, parse_constant=None, object_pairs_hook=None, + use_decimal=False, **kw): + """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON + document) to a Python object. + + *encoding* determines the encoding used to interpret any + :class:`str` objects decoded by this instance (``'utf-8'`` by + default). It has no effect when decoding :class:`unicode` objects. + + Note that currently only encodings that are a superset of ASCII work, + strings of other encodings should be passed in as :class:`unicode`. + + *object_hook*, if specified, will be called with the result of every + JSON object decoded and its return value will be used in place of the + given :class:`dict`. This can be used to provide custom + deserializations (e.g. to support JSON-RPC class hinting). + + *object_pairs_hook* is an optional function that will be called with + the result of any object literal decode with an ordered list of pairs. + The return value of *object_pairs_hook* will be used instead of the + :class:`dict`. This feature can be used to implement custom decoders + that rely on the order that the key and value pairs are decoded (for + example, :func:`collections.OrderedDict` will remember the order of + insertion). If *object_hook* is also defined, the *object_pairs_hook* + takes priority. + + *parse_float*, if specified, will be called with the string of every + JSON float to be decoded. By default, this is equivalent to + ``float(num_str)``. This can be used to use another datatype or parser + for JSON floats (e.g. :class:`decimal.Decimal`). + + *parse_int*, if specified, will be called with the string of every + JSON int to be decoded. By default, this is equivalent to + ``int(num_str)``. This can be used to use another datatype or parser + for JSON integers (e.g. :class:`float`). + + *parse_constant*, if specified, will be called with one of the + following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This + can be used to raise an exception if invalid JSON numbers are + encountered. + + If *use_decimal* is true (default: ``False``) then it implies + parse_float=decimal.Decimal for parity with ``dump``. + + To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` + kwarg. 
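    For example (illustrative, not part of the original docstring)::

        >>> import simplejson as json
        >>> json.loads('1.5', parse_float=str)
        '1.5'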
+ + """ + if (cls is None and encoding is None and object_hook is None and + parse_int is None and parse_float is None and + parse_constant is None and object_pairs_hook is None + and not use_decimal and not kw): + return _default_decoder.decode(s) + if cls is None: + cls = JSONDecoder + if object_hook is not None: + kw['object_hook'] = object_hook + if object_pairs_hook is not None: + kw['object_pairs_hook'] = object_pairs_hook + if parse_float is not None: + kw['parse_float'] = parse_float + if parse_int is not None: + kw['parse_int'] = parse_int + if parse_constant is not None: + kw['parse_constant'] = parse_constant + if use_decimal: + if parse_float is not None: + raise TypeError("use_decimal=True implies parse_float=Decimal") + kw['parse_float'] = Decimal + return cls(encoding=encoding, **kw).decode(s) + + +def _toggle_speedups(enabled): + import simplejson.decoder as dec + import simplejson.encoder as enc + import simplejson.scanner as scan + c_make_encoder = _import_c_make_encoder() + if enabled: + dec.scanstring = dec.c_scanstring or dec.py_scanstring + enc.c_make_encoder = c_make_encoder + enc.encode_basestring_ascii = (enc.c_encode_basestring_ascii or + enc.py_encode_basestring_ascii) + scan.make_scanner = scan.c_make_scanner or scan.py_make_scanner + else: + dec.scanstring = dec.py_scanstring + enc.c_make_encoder = None + enc.encode_basestring_ascii = enc.py_encode_basestring_ascii + scan.make_scanner = scan.py_make_scanner + dec.make_scanner = scan.make_scanner + global _default_decoder + _default_decoder = JSONDecoder( + encoding=None, + object_hook=None, + object_pairs_hook=None, + ) + global _default_encoder + _default_encoder = JSONEncoder( + skipkeys=False, + ensure_ascii=True, + check_circular=True, + allow_nan=True, + indent=None, + separators=None, + encoding='utf-8', + default=None, + ) diff --git a/pyload/lib/simplejson/decoder.py b/pyload/lib/simplejson/decoder.py new file mode 100644 index 000000000..e5496d6e7 --- /dev/null +++ b/pyload/lib/simplejson/decoder.py @@ -0,0 +1,421 @@ +"""Implementation of JSONDecoder +""" +import re +import sys +import struct + +from simplejson.scanner import make_scanner +def _import_c_scanstring(): + try: + from simplejson._speedups import scanstring + return scanstring + except ImportError: + return None +c_scanstring = _import_c_scanstring() + +__all__ = ['JSONDecoder'] + +FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL + +def _floatconstants(): + _BYTES = '7FF80000000000007FF0000000000000'.decode('hex') + # The struct module in Python 2.4 would get frexp() out of range here + # when an endian is specified in the format string. 
Fixed in Python 2.5+ + if sys.byteorder != 'big': + _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1] + nan, inf = struct.unpack('dd', _BYTES) + return nan, inf, -inf + +NaN, PosInf, NegInf = _floatconstants() + + +class JSONDecodeError(ValueError): + """Subclass of ValueError with the following additional properties: + + msg: The unformatted error message + doc: The JSON document being parsed + pos: The start index of doc where parsing failed + end: The end index of doc where parsing failed (may be None) + lineno: The line corresponding to pos + colno: The column corresponding to pos + endlineno: The line corresponding to end (may be None) + endcolno: The column corresponding to end (may be None) + + """ + def __init__(self, msg, doc, pos, end=None): + ValueError.__init__(self, errmsg(msg, doc, pos, end=end)) + self.msg = msg + self.doc = doc + self.pos = pos + self.end = end + self.lineno, self.colno = linecol(doc, pos) + if end is not None: + self.endlineno, self.endcolno = linecol(doc, end) + else: + self.endlineno, self.endcolno = None, None + + +def linecol(doc, pos): + lineno = doc.count('\n', 0, pos) + 1 + if lineno == 1: + colno = pos + else: + colno = pos - doc.rindex('\n', 0, pos) + return lineno, colno + + +def errmsg(msg, doc, pos, end=None): + # Note that this function is called from _speedups + lineno, colno = linecol(doc, pos) + if end is None: + #fmt = '{0}: line {1} column {2} (char {3})' + #return fmt.format(msg, lineno, colno, pos) + fmt = '%s: line %d column %d (char %d)' + return fmt % (msg, lineno, colno, pos) + endlineno, endcolno = linecol(doc, end) + #fmt = '{0}: line {1} column {2} - line {3} column {4} (char {5} - {6})' + #return fmt.format(msg, lineno, colno, endlineno, endcolno, pos, end) + fmt = '%s: line %d column %d - line %d column %d (char %d - %d)' + return fmt % (msg, lineno, colno, endlineno, endcolno, pos, end) + + +_CONSTANTS = { + '-Infinity': NegInf, + 'Infinity': PosInf, + 'NaN': NaN, +} + +STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS) +BACKSLASH = { + '"': u'"', '\\': u'\\', '/': u'/', + 'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t', +} + +DEFAULT_ENCODING = "utf-8" + +def py_scanstring(s, end, encoding=None, strict=True, + _b=BACKSLASH, _m=STRINGCHUNK.match): + """Scan the string s for a JSON string. End is the index of the + character in s after the quote that started the JSON string. + Unescapes all valid JSON string escape sequences and raises ValueError + on attempt to decode an invalid string. If strict is False then literal + control characters are allowed in the string. 
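    Illustrative example (not part of the original docstring): scanning
    '"abc" tail' from index 1, i.e. just past the opening quote, gives

    >>> py_scanstring('"abc" tail', 1)
    (u'abc', 5)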
+ + Returns a tuple of the decoded string and the index of the character in s + after the end quote.""" + if encoding is None: + encoding = DEFAULT_ENCODING + chunks = [] + _append = chunks.append + begin = end - 1 + while 1: + chunk = _m(s, end) + if chunk is None: + raise JSONDecodeError( + "Unterminated string starting at", s, begin) + end = chunk.end() + content, terminator = chunk.groups() + # Content is contains zero or more unescaped string characters + if content: + if not isinstance(content, unicode): + content = unicode(content, encoding) + _append(content) + # Terminator is the end of string, a literal control character, + # or a backslash denoting that an escape sequence follows + if terminator == '"': + break + elif terminator != '\\': + if strict: + msg = "Invalid control character %r at" % (terminator,) + #msg = "Invalid control character {0!r} at".format(terminator) + raise JSONDecodeError(msg, s, end) + else: + _append(terminator) + continue + try: + esc = s[end] + except IndexError: + raise JSONDecodeError( + "Unterminated string starting at", s, begin) + # If not a unicode escape sequence, must be in the lookup table + if esc != 'u': + try: + char = _b[esc] + except KeyError: + msg = "Invalid \\escape: " + repr(esc) + raise JSONDecodeError(msg, s, end) + end += 1 + else: + # Unicode escape sequence + esc = s[end + 1:end + 5] + next_end = end + 5 + if len(esc) != 4: + msg = "Invalid \\uXXXX escape" + raise JSONDecodeError(msg, s, end) + uni = int(esc, 16) + # Check for surrogate pair on UCS-4 systems + if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535: + msg = "Invalid \\uXXXX\\uXXXX surrogate pair" + if not s[end + 5:end + 7] == '\\u': + raise JSONDecodeError(msg, s, end) + esc2 = s[end + 7:end + 11] + if len(esc2) != 4: + raise JSONDecodeError(msg, s, end) + uni2 = int(esc2, 16) + uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00)) + next_end += 6 + char = unichr(uni) + end = next_end + # Append the unescaped character + _append(char) + return u''.join(chunks), end + + +# Use speedup if available +scanstring = c_scanstring or py_scanstring + +WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS) +WHITESPACE_STR = ' \t\n\r' + +def JSONObject((s, end), encoding, strict, scan_once, object_hook, + object_pairs_hook, memo=None, + _w=WHITESPACE.match, _ws=WHITESPACE_STR): + # Backwards compatibility + if memo is None: + memo = {} + memo_get = memo.setdefault + pairs = [] + # Use a slice to prevent IndexError from being raised, the following + # check will raise a more specific ValueError if the string is empty + nextchar = s[end:end + 1] + # Normally we expect nextchar == '"' + if nextchar != '"': + if nextchar in _ws: + end = _w(s, end).end() + nextchar = s[end:end + 1] + # Trivial empty object + if nextchar == '}': + if object_pairs_hook is not None: + result = object_pairs_hook(pairs) + return result, end + 1 + pairs = {} + if object_hook is not None: + pairs = object_hook(pairs) + return pairs, end + 1 + elif nextchar != '"': + raise JSONDecodeError("Expecting property name", s, end) + end += 1 + while True: + key, end = scanstring(s, end, encoding, strict) + key = memo_get(key, key) + + # To skip some function call overhead we optimize the fast paths where + # the JSON key separator is ": " or just ":". 
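            # (Illustrative note, not part of the original: a bare ':' is
            # accepted after a single slice comparison, and ': ' after a
            # couple of cheap character tests; only longer whitespace runs
            # fall through to the regex-based skip below.)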
+ if s[end:end + 1] != ':': + end = _w(s, end).end() + if s[end:end + 1] != ':': + raise JSONDecodeError("Expecting : delimiter", s, end) + + end += 1 + + try: + if s[end] in _ws: + end += 1 + if s[end] in _ws: + end = _w(s, end + 1).end() + except IndexError: + pass + + try: + value, end = scan_once(s, end) + except StopIteration: + raise JSONDecodeError("Expecting object", s, end) + pairs.append((key, value)) + + try: + nextchar = s[end] + if nextchar in _ws: + end = _w(s, end + 1).end() + nextchar = s[end] + except IndexError: + nextchar = '' + end += 1 + + if nextchar == '}': + break + elif nextchar != ',': + raise JSONDecodeError("Expecting , delimiter", s, end - 1) + + try: + nextchar = s[end] + if nextchar in _ws: + end += 1 + nextchar = s[end] + if nextchar in _ws: + end = _w(s, end + 1).end() + nextchar = s[end] + except IndexError: + nextchar = '' + + end += 1 + if nextchar != '"': + raise JSONDecodeError("Expecting property name", s, end - 1) + + if object_pairs_hook is not None: + result = object_pairs_hook(pairs) + return result, end + pairs = dict(pairs) + if object_hook is not None: + pairs = object_hook(pairs) + return pairs, end + +def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR): + values = [] + nextchar = s[end:end + 1] + if nextchar in _ws: + end = _w(s, end + 1).end() + nextchar = s[end:end + 1] + # Look-ahead for trivial empty array + if nextchar == ']': + return values, end + 1 + _append = values.append + while True: + try: + value, end = scan_once(s, end) + except StopIteration: + raise JSONDecodeError("Expecting object", s, end) + _append(value) + nextchar = s[end:end + 1] + if nextchar in _ws: + end = _w(s, end + 1).end() + nextchar = s[end:end + 1] + end += 1 + if nextchar == ']': + break + elif nextchar != ',': + raise JSONDecodeError("Expecting , delimiter", s, end) + + try: + if s[end] in _ws: + end += 1 + if s[end] in _ws: + end = _w(s, end + 1).end() + except IndexError: + pass + + return values, end + +class JSONDecoder(object): + """Simple JSON decoder + + Performs the following translations in decoding by default: + + +---------------+-------------------+ + | JSON | Python | + +===============+===================+ + | object | dict | + +---------------+-------------------+ + | array | list | + +---------------+-------------------+ + | string | unicode | + +---------------+-------------------+ + | number (int) | int, long | + +---------------+-------------------+ + | number (real) | float | + +---------------+-------------------+ + | true | True | + +---------------+-------------------+ + | false | False | + +---------------+-------------------+ + | null | None | + +---------------+-------------------+ + + It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as + their corresponding ``float`` values, which is outside the JSON spec. + + """ + + def __init__(self, encoding=None, object_hook=None, parse_float=None, + parse_int=None, parse_constant=None, strict=True, + object_pairs_hook=None): + """ + *encoding* determines the encoding used to interpret any + :class:`str` objects decoded by this instance (``'utf-8'`` by + default). It has no effect when decoding :class:`unicode` objects. + + Note that currently only encodings that are a superset of ASCII work, + strings of other encodings should be passed in as :class:`unicode`. + + *object_hook*, if specified, will be called with the result of every + JSON object decoded and its return value will be used in place of the + given :class:`dict`. 
This can be used to provide custom
+        deserializations (e.g. to support JSON-RPC class hinting).
+
+        *object_pairs_hook* is an optional function that will be called with
+        the result of any object literal decode with an ordered list of pairs.
+        The return value of *object_pairs_hook* will be used instead of the
+        :class:`dict`. This feature can be used to implement custom decoders
+        that rely on the order that the key and value pairs are decoded (for
+        example, :func:`collections.OrderedDict` will remember the order of
+        insertion). If *object_hook* is also defined, the *object_pairs_hook*
+        takes priority.
+
+        *parse_float*, if specified, will be called with the string of every
+        JSON float to be decoded. By default, this is equivalent to
+        ``float(num_str)``. This can be used to substitute another datatype
+        or parser for JSON floats (e.g. :class:`decimal.Decimal`).
+
+        *parse_int*, if specified, will be called with the string of every
+        JSON int to be decoded. By default, this is equivalent to
+        ``int(num_str)``. This can be used to substitute another datatype or
+        parser for JSON integers (e.g. :class:`float`).
+
+        *parse_constant*, if specified, will be called with one of the
+        following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
+        can be used to raise an exception if invalid JSON numbers are
+        encountered.
+
+        *strict* controls the parser's behavior when it encounters an
+        invalid control character in a string. The default setting of
+        ``True`` means that unescaped control characters are parse errors;
+        if ``False``, control characters will be allowed in strings.
+
+        """
+        self.encoding = encoding
+        self.object_hook = object_hook
+        self.object_pairs_hook = object_pairs_hook
+        self.parse_float = parse_float or float
+        self.parse_int = parse_int or int
+        self.parse_constant = parse_constant or _CONSTANTS.__getitem__
+        self.strict = strict
+        self.parse_object = JSONObject
+        self.parse_array = JSONArray
+        self.parse_string = scanstring
+        self.memo = {}
+        self.scan_once = make_scanner(self)
+
+    def decode(self, s, _w=WHITESPACE.match):
+        """Return the Python representation of ``s`` (a ``str`` or
+        ``unicode`` instance containing a JSON document).
+
+        """
+        obj, end = self.raw_decode(s, idx=_w(s, 0).end())
+        end = _w(s, end).end()
+        if end != len(s):
+            raise JSONDecodeError("Extra data", s, end, len(s))
+        return obj
+
+    def raw_decode(self, s, idx=0):
+        """Decode a JSON document from ``s`` (a ``str`` or ``unicode``
+        beginning with a JSON document) and return a 2-tuple of the Python
+        representation and the index in ``s`` where the document ended.
+
+        This can be used to decode a JSON document from a string that may
+        have extraneous data at the end.
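+
+        For example (illustrative)::
+
+            >>> JSONDecoder().raw_decode('[1, 2] tail')
+            ([1, 2], 6)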
+ + """ + try: + obj, end = self.scan_once(s, idx) + except StopIteration: + raise JSONDecodeError("No JSON object could be decoded", s, idx) + return obj, end diff --git a/pyload/lib/simplejson/encoder.py b/pyload/lib/simplejson/encoder.py new file mode 100644 index 000000000..5ec7440f1 --- /dev/null +++ b/pyload/lib/simplejson/encoder.py @@ -0,0 +1,534 @@ +"""Implementation of JSONEncoder +""" +import re +from decimal import Decimal + +def _import_speedups(): + try: + from simplejson import _speedups + return _speedups.encode_basestring_ascii, _speedups.make_encoder + except ImportError: + return None, None +c_encode_basestring_ascii, c_make_encoder = _import_speedups() + +from simplejson.decoder import PosInf + +ESCAPE = re.compile(ur'[\x00-\x1f\\"\b\f\n\r\t\u2028\u2029]') +ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])') +HAS_UTF8 = re.compile(r'[\x80-\xff]') +ESCAPE_DCT = { + '\\': '\\\\', + '"': '\\"', + '\b': '\\b', + '\f': '\\f', + '\n': '\\n', + '\r': '\\r', + '\t': '\\t', + u'\u2028': '\\u2028', + u'\u2029': '\\u2029', +} +for i in range(0x20): + #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i)) + ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,)) + +FLOAT_REPR = repr + +def encode_basestring(s): + """Return a JSON representation of a Python string + + """ + if isinstance(s, str) and HAS_UTF8.search(s) is not None: + s = s.decode('utf-8') + def replace(match): + return ESCAPE_DCT[match.group(0)] + return u'"' + ESCAPE.sub(replace, s) + u'"' + + +def py_encode_basestring_ascii(s): + """Return an ASCII-only JSON representation of a Python string + + """ + if isinstance(s, str) and HAS_UTF8.search(s) is not None: + s = s.decode('utf-8') + def replace(match): + s = match.group(0) + try: + return ESCAPE_DCT[s] + except KeyError: + n = ord(s) + if n < 0x10000: + #return '\\u{0:04x}'.format(n) + return '\\u%04x' % (n,) + else: + # surrogate pair + n -= 0x10000 + s1 = 0xd800 | ((n >> 10) & 0x3ff) + s2 = 0xdc00 | (n & 0x3ff) + #return '\\u{0:04x}\\u{1:04x}'.format(s1, s2) + return '\\u%04x\\u%04x' % (s1, s2) + return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"' + + +encode_basestring_ascii = ( + c_encode_basestring_ascii or py_encode_basestring_ascii) + +class JSONEncoder(object): + """Extensible JSON encoder for Python data structures. + + Supports the following objects and types by default: + + +-------------------+---------------+ + | Python | JSON | + +===================+===============+ + | dict, namedtuple | object | + +-------------------+---------------+ + | list, tuple | array | + +-------------------+---------------+ + | str, unicode | string | + +-------------------+---------------+ + | int, long, float | number | + +-------------------+---------------+ + | True | true | + +-------------------+---------------+ + | False | false | + +-------------------+---------------+ + | None | null | + +-------------------+---------------+ + + To extend this to recognize other objects, subclass and implement a + ``.default()`` method with another method that returns a serializable + object for ``o`` if possible, otherwise it should call the superclass + implementation (to raise ``TypeError``). + + """ + item_separator = ', ' + key_separator = ': ' + def __init__(self, skipkeys=False, ensure_ascii=True, + check_circular=True, allow_nan=True, sort_keys=False, + indent=None, separators=None, encoding='utf-8', default=None, + use_decimal=True, namedtuple_as_object=True, + tuple_as_array=True): + """Constructor for JSONEncoder, with sensible defaults. 
+
+        If skipkeys is false, then it is a TypeError to attempt
+        encoding of keys that are not str, int, long, float or None. If
+        skipkeys is true, such items are simply skipped.
+
+        If ensure_ascii is true, the output is guaranteed to be str
+        objects with all incoming unicode characters escaped. If
+        ensure_ascii is false, the output will be a unicode object.
+
+        If check_circular is true, then lists, dicts, and custom encoded
+        objects will be checked for circular references during encoding to
+        prevent an infinite recursion (which would cause an OverflowError).
+        Otherwise, no such check takes place.
+
+        If allow_nan is true, then NaN, Infinity, and -Infinity will be
+        encoded as such. This behavior is not JSON specification compliant,
+        but is consistent with most JavaScript based encoders and decoders.
+        Otherwise, it will be a ValueError to encode such floats.
+
+        If sort_keys is true, then the output of dictionaries will be
+        sorted by key; this is useful for regression tests to ensure
+        that JSON serializations can be compared on a day-to-day basis.
+
+        If indent is a string, then JSON array elements and object members
+        will be pretty-printed with a newline followed by that string
+        repeated for each level of nesting. ``None`` (the default) selects
+        the most compact representation without any newlines. For backwards
+        compatibility with versions of simplejson earlier than 2.1.0, an
+        integer is also accepted and is converted to a string with that many
+        spaces.
+
+        If specified, separators should be an (item_separator, key_separator)
+        tuple. The default is (', ', ': '). To get the most compact JSON
+        representation you should specify (',', ':') to eliminate whitespace.
+
+        If specified, default is a function that gets called for objects
+        that can't otherwise be serialized. It should return a JSON encodable
+        version of the object or raise a ``TypeError``.
+
+        If encoding is not None, then all input strings will be
+        transformed into unicode using that encoding prior to JSON-encoding.
+        The default is UTF-8.
+
+        If use_decimal is true (the default), ``decimal.Decimal`` will
+        be supported directly by the encoder. For the inverse, decode JSON
+        with ``parse_float=decimal.Decimal``.
+
+        If namedtuple_as_object is true (the default), tuple subclasses with
+        ``_asdict()`` methods will be encoded as JSON objects.
+
+        If tuple_as_array is true (the default), tuple (and subclasses) will
+        be encoded as JSON arrays.
+        """
+
+        self.skipkeys = skipkeys
+        self.ensure_ascii = ensure_ascii
+        self.check_circular = check_circular
+        self.allow_nan = allow_nan
+        self.sort_keys = sort_keys
+        self.use_decimal = use_decimal
+        self.namedtuple_as_object = namedtuple_as_object
+        self.tuple_as_array = tuple_as_array
+        if isinstance(indent, (int, long)):
+            indent = ' ' * indent
+        self.indent = indent
+        if separators is not None:
+            self.item_separator, self.key_separator = separators
+        elif indent is not None:
+            self.item_separator = ','
+        if default is not None:
+            self.default = default
+        self.encoding = encoding
+
+    def default(self, o):
+        """Implement this method in a subclass such that it returns
+        a serializable object for ``o``, or calls the base implementation
+        (to raise a ``TypeError``).
+ + For example, to support arbitrary iterators, you could + implement default like this:: + + def default(self, o): + try: + iterable = iter(o) + except TypeError: + pass + else: + return list(iterable) + return JSONEncoder.default(self, o) + + """ + raise TypeError(repr(o) + " is not JSON serializable") + + def encode(self, o): + """Return a JSON string representation of a Python data structure. + + >>> from simplejson import JSONEncoder + >>> JSONEncoder().encode({"foo": ["bar", "baz"]}) + '{"foo": ["bar", "baz"]}' + + """ + # This is for extremely simple cases and benchmarks. + if isinstance(o, basestring): + if isinstance(o, str): + _encoding = self.encoding + if (_encoding is not None + and not (_encoding == 'utf-8')): + o = o.decode(_encoding) + if self.ensure_ascii: + return encode_basestring_ascii(o) + else: + return encode_basestring(o) + # This doesn't pass the iterator directly to ''.join() because the + # exceptions aren't as detailed. The list call should be roughly + # equivalent to the PySequence_Fast that ''.join() would do. + chunks = self.iterencode(o, _one_shot=True) + if not isinstance(chunks, (list, tuple)): + chunks = list(chunks) + if self.ensure_ascii: + return ''.join(chunks) + else: + return u''.join(chunks) + + def iterencode(self, o, _one_shot=False): + """Encode the given object and yield each string + representation as available. + + For example:: + + for chunk in JSONEncoder().iterencode(bigobject): + mysocket.write(chunk) + + """ + if self.check_circular: + markers = {} + else: + markers = None + if self.ensure_ascii: + _encoder = encode_basestring_ascii + else: + _encoder = encode_basestring + if self.encoding != 'utf-8': + def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding): + if isinstance(o, str): + o = o.decode(_encoding) + return _orig_encoder(o) + + def floatstr(o, allow_nan=self.allow_nan, + _repr=FLOAT_REPR, _inf=PosInf, _neginf=-PosInf): + # Check for specials. Note that this type of test is processor + # and/or platform-specific, so do tests which don't depend on + # the internals. + + if o != o: + text = 'NaN' + elif o == _inf: + text = 'Infinity' + elif o == _neginf: + text = '-Infinity' + else: + return _repr(o) + + if not allow_nan: + raise ValueError( + "Out of range float values are not JSON compliant: " + + repr(o)) + + return text + + + key_memo = {} + if (_one_shot and c_make_encoder is not None + and self.indent is None): + _iterencode = c_make_encoder( + markers, self.default, _encoder, self.indent, + self.key_separator, self.item_separator, self.sort_keys, + self.skipkeys, self.allow_nan, key_memo, self.use_decimal, + self.namedtuple_as_object, self.tuple_as_array) + else: + _iterencode = _make_iterencode( + markers, self.default, _encoder, self.indent, floatstr, + self.key_separator, self.item_separator, self.sort_keys, + self.skipkeys, _one_shot, self.use_decimal, + self.namedtuple_as_object, self.tuple_as_array) + try: + return _iterencode(o, 0) + finally: + key_memo.clear() + + +class JSONEncoderForHTML(JSONEncoder): + """An encoder that produces JSON safe to embed in HTML. + + To embed JSON content in, say, a script tag on a web page, the + characters &, < and > should be escaped. They cannot be escaped + with the usual entities (e.g. 
&) because they are not expanded + within |;var)" + SHOW_PATTERN = r"title='(.*?)'" + SERIE_PATTERN = r".*Serie.*" + + def decrypt(self, pyfile): + src = self.req.load(str(pyfile.url)) + + pattern = re.compile(self.VALUES_PATTERN, re.DOTALL) + data = json_loads(re.search(pattern, src).group(1)) + + # Get package info + links = [] + Showname = re.search(self.SHOW_PATTERN, src) + if Showname: + Showname = Showname.group(1).decode("utf-8") + else: + Showname = self.pyfile.package().name + + if re.search(self.SERIE_PATTERN, src): + for Season in data: + self.logDebug("Season " + Season) + for Episode in data[Season]: + self.logDebug("Episode " + Episode) + links.extend(self.getpreferred(data[Season][Episode])) + if self.getConfig("changeNameS") == "Episode": + self.packages.append((data[Season][Episode]['info']['name'].split("»")[0], links, data[Season][Episode]['info']['name'].split("»")[0])) + links = [] + + if self.getConfig("changeNameS") == "Season": + self.packages.append((Showname + " Season " + Season, links, Showname + " Season " + Season)) + links = [] + + if self.getConfig("changeNameS") == "Show": + if links == []: + self.fail('Could not extract any links (Out of Date?)') + else: + self.packages.append((Showname, links, Showname)) + + elif self.getConfig("changeNameS") == "Packagename": + if links == []: + self.fail('Could not extract any links (Out of Date?)') + else: + self.core.files.addLinks(links, self.pyfile.package().id) + else: + for Movie in data: + links.extend(self.getpreferred(data[Movie])) + if self.getConfig("changeName") == "Movie": + if links == []: + self.fail('Could not extract any links (Out of Date?)') + else: + self.packages.append((Showname, links, Showname)) + + elif self.getConfig("changeName") == "Packagename": + if links == []: + self.fail('Could not extract any links (Out of Date?)') + else: + self.core.files.addLinks(links, self.pyfile.package().id) + + #selects the preferred hoster, after that selects any hoster (ignoring the one to ignore) + #selects only one Hoster + def getpreferred(self, hosterslist): + hosterlist = {} + if 'u' in hosterslist: + hosterlist.update(hosterslist['u']) + if ('d' in hosterslist): + hosterlist.update(hosterslist['d']) + if self.getConfig("useStreams") and 's' in hosterslist: + hosterlist.update(hosterslist['s']) + + result = [] + preferredList = self.getConfig("hosterList").strip().lower().replace('|',',').replace('.','').replace(';',',').split(',') + if self.getConfig("randomPreferred") == True: + random.shuffle(preferredList) + for preferred in preferredList: + for Hoster in hosterlist: + if preferred == Hoster.split('<')[0].strip().lower().replace('.',''): + for Part in hosterlist[Hoster]: + self.logDebug("selected " + Part[3]) + result.append(str(Part[3])) + return result + + ignorelist = self.getConfig("ignoreList").strip().lower().replace('|',',').replace('.','').replace(';',',').split(',') + if self.getConfig('hosterListMode') == "all": + for Hoster in hosterlist: + if Hoster.split('<')[0].strip().lower().replace('.','') not in ignorelist: + for Part in hosterlist[Hoster]: + self.logDebug("selected " + Part[3]) + result.append(str(Part[3])) + return result + return result + + + diff --git a/pyload/plugins/crypter/CCF.py b/pyload/plugins/crypter/CCF.py new file mode 100644 index 000000000..ab7ff1099 --- /dev/null +++ b/pyload/plugins/crypter/CCF.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import re +from urllib2 import build_opener + +from module.plugins.Crypter import Crypter +from 
module.lib.MultipartPostHandler import MultipartPostHandler + +from os import makedirs +from os.path import exists, join + +class CCF(Crypter): + __name__ = "CCF" + __version__ = "0.2" + __pattern__ = r"(?!http://).*\.ccf$" + __description__ = """CCF Container Convert Plugin""" + __author_name__ = ("Willnix") + __author_mail__ = ("Willnix@pyload.org") + + def decrypt(self, pyfile): + + infile = pyfile.url.replace("\n", "") + + opener = build_opener(MultipartPostHandler) + params = {"src": "ccf", + "filename": "test.ccf", + "upload": open(infile, "rb")} + tempdlc_content = opener.open('http://service.jdownloader.net/dlcrypt/getDLC.php', params).read() + + download_folder = self.config['general']['download_folder'] + location = download_folder #join(download_folder, self.pyfile.package().folder.decode(sys.getfilesystemencoding())) + if not exists(location): + makedirs(location) + + tempdlc_name = join(location, "tmp_%s.dlc" % pyfile.name) + tempdlc = open(tempdlc_name, "w") + tempdlc.write(re.search(r'(.*)', tempdlc_content, re.DOTALL).group(1)) + tempdlc.close() + + self.packages.append((tempdlc_name, [tempdlc_name], tempdlc_name)) + diff --git a/pyload/plugins/crypter/CrockoComFolder.py b/pyload/plugins/crypter/CrockoComFolder.py new file mode 100644 index 000000000..d727ec7ab --- /dev/null +++ b/pyload/plugins/crypter/CrockoComFolder.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- + +from module.plugins.internal.SimpleCrypter import SimpleCrypter + +class CrockoComFolder(SimpleCrypter): + __name__ = "CrockoComFolder" + __type__ = "crypter" + __pattern__ = r"http://(www\.)?crocko.com/f/.*" + __version__ = "0.01" + __description__ = """Crocko.com Folder Plugin""" + __author_name__ = ("zoidberg") + __author_mail__ = ("zoidberg@mujmail.cz") + + LINK_PATTERN = r'download' \ No newline at end of file diff --git a/pyload/plugins/crypter/CryptItCom.py b/pyload/plugins/crypter/CryptItCom.py new file mode 100644 index 000000000..4935758c7 --- /dev/null +++ b/pyload/plugins/crypter/CryptItCom.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- + + +import re + +from random import randint + +from module.plugins.Crypter import Crypter + + +class CryptItCom(Crypter): + __name__ = "CryptItCom" + __type__ = "container" + __pattern__ = r"http://[\w\.]*?crypt-it\.com/(s|e|d|c)/[\w]+" + __version__ = "0.1" + __description__ = """Crypt.It.com Container Plugin""" + __author_name__ = ("jeix") + __author_mail__ = ("jeix@hasnomail.de") + + def file_exists(self): + html = self.load(self.pyfile.url) + if r'
Was ist Crypt-It
' in html: + return False + return True + + def decrypt(self, pyfile): + if not self.file_exists(): + self.offline() + + # @TODO parse name and password + repl_pattern = r"/(s|e|d|c)/" + url = re.sub(repl_pattern, r"/d/", self.pyfile.url) + + pyfile.name = "tmp_cryptit_%s.ccf" % randint(0,1000) + location = self.download(url) + + self.packages.append(["Crypt-it Package", [location], "Crypt-it Package"]) + \ No newline at end of file diff --git a/pyload/plugins/crypter/CzshareComFolder.py b/pyload/plugins/crypter/CzshareComFolder.py new file mode 100644 index 000000000..c240c6a70 --- /dev/null +++ b/pyload/plugins/crypter/CzshareComFolder.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- + +import re +from module.plugins.Crypter import Crypter + +class CzshareComFolder(Crypter): + __name__ = "CzshareComFolder" + __type__ = "crypter" + __pattern__ = r"http://(\w*\.)*czshare\.(com|cz)/folders/.*" + __version__ = "0.1" + __description__ = """Czshare.com Folder Plugin""" + __author_name__ = ("zoidberg") + __author_mail__ = ("zoidberg@mujmail.cz") + + FOLDER_PATTERN = r'\s*\s*(.*?)
' + LINK_PATTERN = r'info' + #NEXT_PAGE_PATTERN = r'' + + def decrypt(self, pyfile): + html = self.load(self.pyfile.url) + + new_links = [] + found = re.search(self.FOLDER_PATTERN, html, re.DOTALL) + if found is None: self.fail("Parse error (FOLDER)") + new_links.extend(re.findall(self.LINK_PATTERN, found.group(1))) + + if new_links: + self.core.files.addLinks(new_links, self.pyfile.package().id) + else: + self.fail('Could not extract any links') \ No newline at end of file diff --git a/pyload/plugins/crypter/DDLMusicOrg.py b/pyload/plugins/crypter/DDLMusicOrg.py new file mode 100644 index 000000000..f7cc996d0 --- /dev/null +++ b/pyload/plugins/crypter/DDLMusicOrg.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import re +from time import sleep + +from module.plugins.Crypter import Crypter + +class DDLMusicOrg(Crypter): + __name__ = "DDLMusicOrg" + __type__ = "container" + __pattern__ = r"http://[\w\.]*?ddl-music\.org/captcha/ddlm_cr\d\.php\?\d+\?\d+" + __version__ = "0.3" + __description__ = """ddl-music.org Container Plugin""" + __author_name__ = ("mkaay") + __author_mail__ = ("mkaay@mkaay.de") + + def setup(self): + self.multiDL = False + + def decrypt(self, pyfile): + html = self.req.load(self.pyfile.url, cookies=True) + + if re.search(r"Wer dies nicht rechnen kann", html) is not None: + self.offline() + + math = re.search(r"(\d+) ([\+-]) (\d+) =\s+", htmlwithlink) + if m: + self.packages.append((self.pyfile.package().name, [m.group(1)], self.pyfile.package().folder)) + else: + self.retry() diff --git a/pyload/plugins/crypter/DataHuFolder.py b/pyload/plugins/crypter/DataHuFolder.py new file mode 100644 index 000000000..f710f60d7 --- /dev/null +++ b/pyload/plugins/crypter/DataHuFolder.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- + +############################################################################ +# This program is free software: you can redistribute it and/or modify # +# it under the terms of the GNU Affero General Public License as # +# published by the Free Software Foundation, either version 3 of the # +# License, or (at your option) any later version. # +# # +# This program is distributed in the hope that it will be useful, # +# but WITHOUT ANY WARRANTY; without even the implied warranty of # +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # +# GNU Affero General Public License for more details. # +# # +# You should have received a copy of the GNU Affero General Public License # +# along with this program. If not, see . 
# +############################################################################ + +import re + +from module.plugins.internal.SimpleCrypter import SimpleCrypter + + +class DataHuFolder(SimpleCrypter): + __name__ = "DataHuFolder" + __type__ = "crypter" + __pattern__ = r"http://(www\.)?data.hu/dir/\w+" + __version__ = "0.03" + __description__ = """Data.hu Folder Plugin""" + __author_name__ = ("crash", "stickell") + __author_mail__ = ("l.stickell@yahoo.it") + + LINK_PATTERN = r"\1" + TITLE_PATTERN = ur'(?P<title>.+) Let\xf6lt\xe9se' + + def decrypt(self, pyfile): + self.html = self.load(pyfile.url, decode=True) + + if u'K\xe9rlek add meg a jelsz\xf3t' in self.html: # Password protected + password = self.getPassword() + if password is '': + self.fail("No password specified, please set right password on Add package form and retry") + self.logDebug('The folder is password protected', 'Using password: ' + password) + self.html = self.load(pyfile.url, post={'mappa_pass': password}, decode=True) + if u'Hib\xe1s jelsz\xf3' in self.html: # Wrong password + self.fail("Incorrect password, please set right password on Add package form and retry") + + package_name, folder_name = self.getPackageNameAndFolder() + + package_links = re.findall(self.LINK_PATTERN, self.html) + self.logDebug('Package has %d links' % len(package_links)) + + if package_links: + self.packages = [(package_name, package_links, folder_name)] + else: + self.fail('Could not extract any links') diff --git a/pyload/plugins/crypter/DdlstorageComFolder.py b/pyload/plugins/crypter/DdlstorageComFolder.py new file mode 100644 index 000000000..d536032c6 --- /dev/null +++ b/pyload/plugins/crypter/DdlstorageComFolder.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- + +import re +from module.plugins.Crypter import Crypter +from module.plugins.hoster.MediafireCom import checkHTMLHeader +from module.common.json_layer import json_loads + +class DdlstorageComFolder(Crypter): + __name__ = "DdlstorageComFolder" + __type__ = "crypter" + __pattern__ = r"http://(?:\w*\.)*?ddlstorage.com/folder/\w{10}" + __version__ = "0.01" + __description__ = """DDLStorage.com Folder Plugin""" + __author_name__ = ("godofdream") + __author_mail__ = ("soilfiction@gmail.com") + + FILE_URL_PATTERN = '' + + def decrypt(self, pyfile): + new_links = [] + # load and parse html + html = self.load(pyfile.url) + found = re.findall(self.FILE_URL_PATTERN, html) + self.logDebug(found) + for link in found: + # file page + new_links.append("http://www.ddlstorage.com/%s" % link) + + if new_links: + self.core.files.addLinks(new_links, self.pyfile.package().id) + else: + self.fail('Could not extract any links') diff --git a/pyload/plugins/crypter/DepositfilesComFolder.py b/pyload/plugins/crypter/DepositfilesComFolder.py new file mode 100644 index 000000000..9023b238f --- /dev/null +++ b/pyload/plugins/crypter/DepositfilesComFolder.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- + +from module.plugins.internal.SimpleCrypter import SimpleCrypter + +class DepositfilesComFolder(SimpleCrypter): + __name__ = "DepositfilesComFolder" + __type__ = "crypter" + __pattern__ = r"http://(www\.)?depositfiles.com/folders/\w+" + __version__ = "0.01" + __description__ = """Depositfiles.com Folder Plugin""" + __author_name__ = ("zoidberg") + __author_mail__ = ("zoidberg@mujmail.cz") + + LINK_PATTERN = r'
]*>\s*' \ No newline at end of file diff --git a/pyload/plugins/crypter/Dereferer.py b/pyload/plugins/crypter/Dereferer.py new file mode 100644 index 000000000..584835e18 --- /dev/null +++ b/pyload/plugins/crypter/Dereferer.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- + +""" + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3 of the License, + or (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, see . +""" + +import re +import urllib + +from module.plugins.Crypter import Crypter + +class Dereferer(Crypter): + __name__ = "Dereferer" + __type__ = "crypter" + __pattern__ = r'https?://([^/]+)/.*?(?P(ht|f)tps?(://|%3A%2F%2F).*)' + __version__ = "0.1" + __description__ = """Crypter for dereferers""" + __author_name__ = ("zoidberg") + __author_mail__ = ("zoidberg@mujmail.cz") + + def decrypt(self, pyfile): + link = re.match(self.__pattern__, self.pyfile.url).group('url') + self.core.files.addLinks([ urllib.unquote(link).rstrip('+') ], self.pyfile.package().id) diff --git a/pyload/plugins/crypter/DontKnowMe.py b/pyload/plugins/crypter/DontKnowMe.py new file mode 100644 index 000000000..dfa72df47 --- /dev/null +++ b/pyload/plugins/crypter/DontKnowMe.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- + +import re +import urllib + +from module.plugins.Crypter import Crypter + +class DontKnowMe(Crypter): + __name__ = "DontKnowMe" + __type__ = "crypter" + __pattern__ = r"http://dontknow.me/at/\?.+$" + __version__ = "0.1" + __description__ = """DontKnowMe""" + __author_name__ = ("selaux") + __author_mail__ = ("") + + LINK_PATTERN = r"http://dontknow.me/at/\?(.+)$" + + def decrypt(self, pyfile): + link = re.findall(self.LINK_PATTERN, self.pyfile.url)[0] + self.core.files.addLinks([ urllib.unquote(link) ], self.pyfile.package().id) diff --git a/pyload/plugins/crypter/DownloadVimeoCom.py b/pyload/plugins/crypter/DownloadVimeoCom.py new file mode 100644 index 000000000..88310915b --- /dev/null +++ b/pyload/plugins/crypter/DownloadVimeoCom.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import re +import HTMLParser +from module.plugins.Crypter import Crypter + +class DownloadVimeoCom(Crypter): + __name__ = 'DownloadVimeoCom' + __type__ = 'crypter' + __pattern__ = r'(?:http://vimeo\.com/\d*|http://smotri\.com/video/view/\?id=.*)' + ## The download from dailymotion failed with a 403 + __version__ = '0.1' + __description__ = """Video Download Plugin based on downloadvimeo.com""" + __author_name__ = ('4Christopher') + __author_mail__ = ('4Christopher@gmx.de') + BASE_URL = 'http://downloadvimeo.com' + + def decrypt(self, pyfile): + self.package = pyfile.package() + html = self.load('%s/generate?url=%s' % (self.BASE_URL, pyfile.url)) + h = HTMLParser.HTMLParser() + try: + f = re.search(r'cmd quality="(?P[^"]+?)">\s*?(?P[^<]*?)', html) + except: + self.logDebug('Failed to find the URL') + else: + url = h.unescape(f.group('URL')) + self.logDebug('Quality: %s, URL: %s' % (f.group('quality'), url)) + self.packages.append((self.package.name, [url], self.package.folder)) diff --git a/pyload/plugins/crypter/DuckCryptInfo.py 
b/pyload/plugins/crypter/DuckCryptInfo.py new file mode 100644 index 000000000..4886d24db --- /dev/null +++ b/pyload/plugins/crypter/DuckCryptInfo.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- + +import re +from module.lib.BeautifulSoup import BeautifulSoup +from module.plugins.Crypter import Crypter + +class DuckCryptInfo(Crypter): + __name__ = "DuckCryptInfo" + __type__ = "container" + __pattern__ = r"http://(?:www\.)?duckcrypt.info/(folder|wait|link)/(\w+)/?(\w*)" + __version__ = "0.02" + __description__ = """DuckCrypt.Info Container Plugin""" + __author_name__ = ("godofdream") + __author_mail__ = ("soilfiction@gmail.com") + + TIMER_PATTERN = r'(.*)' + + def decrypt(self, pyfile): + url = pyfile.url + # seems we don't need to wait + #src = self.req.load(str(url)) + #found = re.search(self.TIMER_PATTERN, src) + #if found: + # self.logDebug("Sleeping for" % found.group(1)) + # self.setWait(int(found.group(1)) ,False) + found = re.search(self.__pattern__, url) + if not found: + self.fail('Weird error in link') + if str(found.group(1)) == "link": + self.handleLink(url) + else: + self.handleFolder(found) + + + + def handleFolder(self, found): + src = self.load("http://duckcrypt.info/ajax/auth.php?hash=" + str(found.group(2))) + found = re.search(self.__pattern__, src) + self.logDebug("Redirectet to " + str(found.group(0))) + src = self.load(str(found.group(0))) + soup = BeautifulSoup(src) + cryptlinks = soup.findAll("div", attrs={"class": "folderbox"}) + self.logDebug("Redirectet to " + str(cryptlinks)) + if not cryptlinks: + self.fail('no links found - (Plugin out of date?)') + for clink in cryptlinks: + if clink.find("a"): + self.handleLink(clink.find("a")['href']) + + def handleLink(self, url): + src = self.load(url) + soup = BeautifulSoup(src) + link = soup.find("iframe")["src"] + if not link: + self.logDebug('no links found - (Plugin out of date?)') + else: + self.core.files.addLinks([link], self.pyfile.package().id) + diff --git a/pyload/plugins/crypter/EasybytezComFolder.py b/pyload/plugins/crypter/EasybytezComFolder.py new file mode 100644 index 000000000..1b887e421 --- /dev/null +++ b/pyload/plugins/crypter/EasybytezComFolder.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- + +############################################################################ +# This program is free software: you can redistribute it and/or modify # +# it under the terms of the GNU Affero General Public License as # +# published by the Free Software Foundation, either version 3 of the # +# License, or (at your option) any later version. # +# # +# This program is distributed in the hope that it will be useful, # +# but WITHOUT ANY WARRANTY; without even the implied warranty of # +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # +# GNU Affero General Public License for more details. # +# # +# You should have received a copy of the GNU Affero General Public License # +# along with this program. If not, see . # +############################################################################ + +import re + +from module.plugins.internal.SimpleCrypter import SimpleCrypter + + +class EasybytezComFolder(SimpleCrypter): + __name__ = "EasybytezComFolder" + __type__ = "crypter" + __pattern__ = r"https?://(www\.)?easybytez\.com/users/\w+/\w+" + __version__ = "0.01" + __description__ = """Easybytez Crypter Plugin""" + __author_name__ = ("stickell") + __author_mail__ = ("l.stickell@yahoo.it") + + LINK_PATTERN = r'' + TITLE_PATTERN = r'Files of (?P<title>.+) folder' + PAGES_PATTERN = r"(\d+)Next »
\(\d+ total\)
" + + def decrypt(self, pyfile): + self.html = self.load(pyfile.url, decode=True) + + package_name, folder_name = self.getPackageNameAndFolder() + + package_links = re.findall(self.LINK_PATTERN, self.html) + + pages = re.search(self.PAGES_PATTERN, self.html) + if pages: + pages = int(pages.group(1)) + else: + pages = 1 + + p = 2 + while p <= pages: + self.html = self.load(pyfile.url, get={'page': p}, decode=True) + package_links += re.findall(self.LINK_PATTERN, self.html) + p += 1 + + self.logDebug('Package has %d links' % len(package_links)) + + if package_links: + self.packages = [(package_name, package_links, folder_name)] + else: + self.fail('Could not extract any links') \ No newline at end of file diff --git a/pyload/plugins/crypter/EmbeduploadCom.py b/pyload/plugins/crypter/EmbeduploadCom.py new file mode 100644 index 000000000..8fd70882f --- /dev/null +++ b/pyload/plugins/crypter/EmbeduploadCom.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- + +import re +from module.plugins.Crypter import Crypter +from module.network.HTTPRequest import BadHeader + +class EmbeduploadCom(Crypter): + __name__ = "EmbeduploadCom" + __type__ = "crypter" + __pattern__ = r"http://(www\.)?embedupload.com/\?d=.*" + __version__ = "0.02" + __description__ = """EmbedUpload.com crypter""" + __config__ = [("preferedHoster", "str", "Prefered hoster list (bar-separated) ", "embedupload"), + ("ignoredHoster", "str", "Ignored hoster list (bar-separated) ", "")] + __author_name__ = ("zoidberg") + __author_mail__ = ("zoidberg@mujmail.cz") + + LINK_PATTERN = r'
]*>\s*' + + def decrypt(self, pyfile): + self.html = self.load(self.pyfile.url, decode=True) + tmp_links = [] + new_links = [] + + found = re.findall(self.LINK_PATTERN, self.html) + if found: + prefered_set = set(self.getConfig("preferedHoster").split('|')) + prefered_set = map(lambda s: s.lower().split('.')[0], prefered_set) + print "PF", prefered_set + tmp_links.extend([x[1] for x in found if x[0] in prefered_set]) + self.getLocation(tmp_links, new_links) + + if not new_links: + ignored_set = set(self.getConfig("ignoredHoster").split('|')) + ignored_set = map(lambda s: s.lower().split('.')[0], ignored_set) + print "IG", ignored_set + tmp_links.extend([x[1] for x in found if x[0] not in ignored_set]) + self.getLocation(tmp_links, new_links) + + if new_links: + self.core.files.addLinks(new_links, self.pyfile.package().id) + else: + self.fail('Could not extract any links') + + def getLocation(self, tmp_links, new_links): + for link in tmp_links: + try: + header = self.load(link, just_header = True) + if "location" in header: + new_links.append(header['location']) + except BadHeader: + pass + + \ No newline at end of file diff --git a/pyload/plugins/crypter/FilebeerInfoFolder.py b/pyload/plugins/crypter/FilebeerInfoFolder.py new file mode 100644 index 000000000..f45144f14 --- /dev/null +++ b/pyload/plugins/crypter/FilebeerInfoFolder.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- + +import re +from module.plugins.Crypter import Crypter + +class FilebeerInfoFolder(Crypter): + __name__ = "FilebeerInfoFolder" + __type__ = "crypter" + __pattern__ = r"http://(?:www\.)?filebeer\.info/(\d+~f).*" + __version__ = "0.01" + __description__ = """Filebeer.info Folder Plugin""" + __author_name__ = ("zoidberg") + __author_mail__ = ("zoidberg@mujmail.cz") + + LINK_PATTERN = r'' + PAGE_COUNT_PATTERN = r'
\s*Total Pages (\d+)' + + def decrypt(self, pyfile): + pyfile.url = re.sub(self.__pattern__, r'http://filebeer.info/\1?page=1', pyfile.url) + html = self.load(pyfile.url) + + page_count = int(re.search(self.PAGE_COUNT_PATTERN, html).group(1)) + new_links = [] + + for i in range(1, page_count + 1): + self.logInfo("Fetching links from page %i" % i) + new_links.extend(re.findall(self.LINK_PATTERN, html)) + + if i < page_count: + html = self.load("%s?page=%d" % (pyfile.url, i+1)) + + if new_links: + self.core.files.addLinks(new_links, self.pyfile.package().id) + else: + self.fail('Could not extract any links') \ No newline at end of file diff --git a/pyload/plugins/crypter/FilefactoryComFolder.py b/pyload/plugins/crypter/FilefactoryComFolder.py new file mode 100644 index 000000000..32793b491 --- /dev/null +++ b/pyload/plugins/crypter/FilefactoryComFolder.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- + +import re +from module.plugins.Crypter import Crypter + +class FilefactoryComFolder(Crypter): + __name__ = "FilefactoryComFolder" + __type__ = "crypter" + __pattern__ = r"(http://(www\.)?filefactory\.com/f/\w+).*" + __version__ = "0.1" + __description__ = """Filefactory.com Folder Plugin""" + __author_name__ = ("zoidberg") + __author_mail__ = ("zoidberg@mujmail.cz") + + FOLDER_PATTERN = r'(.*?)
' + LINK_PATTERN = r'
' + PAGINATOR_PATTERN = r'
' + NEXT_PAGE_PATTERN = r'
  • .*?
  • \s*
  • ' + + def decrypt(self, pyfile): + url_base = re.search(self.__pattern__, self.pyfile.url).group(1) + html = self.load(url_base) + + new_links = [] + for i in range(1,100): + self.logInfo("Fetching links from page %i" % i) + found = re.search(self.FOLDER_PATTERN, html, re.DOTALL) + if found is None: self.fail("Parse error (FOLDER)") + + new_links.extend(re.findall(self.LINK_PATTERN, found.group(1))) + + try: + paginator = re.search(self.PAGINATOR_PATTERN, html, re.DOTALL).group(1) + next_page = re.search(self.NEXT_PAGE_PATTERN, paginator).group(1) + html = self.load("%s/%s" % (url_base, next_page)) + except Exception, e: + break + else: + self.logInfo("Limit of 99 pages reached, aborting") + + if new_links: + self.core.files.addLinks(new_links, self.pyfile.package().id) + else: + self.fail('Could not extract any links') \ No newline at end of file diff --git a/pyload/plugins/crypter/FileserveComFolder.py b/pyload/plugins/crypter/FileserveComFolder.py new file mode 100644 index 000000000..9fe806971 --- /dev/null +++ b/pyload/plugins/crypter/FileserveComFolder.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- + +import re + +from module.plugins.Crypter import Crypter + +class FileserveComFolder(Crypter): + __name__ = "FileserveComFolder" + __type__ = "crypter" + __pattern__ = r"http://(?:www\.)?fileserve.com/list/\w+" + __version__ = "0.11" + __description__ = """FileServeCom.com Folder Plugin""" + __author_name__ = ("fionnc") + __author_mail__ = ("fionnc@gmail.com") + + FOLDER_PATTERN = r'(.*?)
    ' + LINK_PATTERN = r'
    ' + + def decrypt(self, pyfile): + html = self.load(self.pyfile.url) + + new_links = [] + + folder = re.search(self.FOLDER_PATTERN, html, re.DOTALL) + if folder is None: self.fail("Parse error (FOLDER)") + + new_links.extend(re.findall(self.LINK_PATTERN, folder.group(1))) + + if new_links: + self.core.files.addLinks(map(lambda s:"http://fileserve.com%s" % s, new_links), self.pyfile.package().id) + else: + self.fail('Could not extract any links') \ No newline at end of file diff --git a/pyload/plugins/crypter/FourChanOrg.py b/pyload/plugins/crypter/FourChanOrg.py new file mode 100644 index 000000000..5c96e723d --- /dev/null +++ b/pyload/plugins/crypter/FourChanOrg.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import re + +from module.plugins.Crypter import Crypter + +class FourChanOrg(Crypter): + # Based on 4chandl by Roland Beermann + # https://gist.github.com/enkore/3492599 + __name__ = "FourChanOrg" + __type__ = "container" + __version__ = "0.3" + __pattern__ = r"http://boards\.4chan.org/\w+/res/(\d+)" + __description__ = "Downloader for entire 4chan threads" + + def decrypt(self, pyfile): + pagehtml = self.load(pyfile.url) + + images = set(re.findall(r'(images\.4chan\.org/[^/]*/src/[^"<]*)', pagehtml)) + urls = [] + for image in images: + urls.append("http://" + image) + + self.core.files.addLinks(urls, self.pyfile.package().id) diff --git a/pyload/plugins/crypter/FshareVnFolder.py b/pyload/plugins/crypter/FshareVnFolder.py new file mode 100644 index 000000000..2515e7edd --- /dev/null +++ b/pyload/plugins/crypter/FshareVnFolder.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- + +from module.plugins.internal.SimpleCrypter import SimpleCrypter + +class FshareVnFolder(SimpleCrypter): + __name__ = "FshareVnFolder" + __type__ = "crypter" + __pattern__ = r"http://(www\.)?fshare.vn/folder/.*" + __version__ = "0.01" + __description__ = """Fshare.vn Folder Plugin""" + __author_name__ = ("zoidberg") + __author_mail__ = ("zoidberg@mujmail.cz") + + LINK_PATTERN = r'
  • ' \ No newline at end of file diff --git a/pyload/plugins/crypter/GooGl.py b/pyload/plugins/crypter/GooGl.py new file mode 100644 index 000000000..bcb1d7494 --- /dev/null +++ b/pyload/plugins/crypter/GooGl.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- + +############################################################################ +# This program is free software: you can redistribute it and/or modify # +# it under the terms of the GNU Affero General Public License as # +# published by the Free Software Foundation, either version 3 of the # +# License, or (at your option) any later version. # +# # +# This program is distributed in the hope that it will be useful, # +# but WITHOUT ANY WARRANTY; without even the implied warranty of # +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # +# GNU Affero General Public License for more details. # +# # +# You should have received a copy of the GNU Affero General Public License # +# along with this program. If not, see . # +############################################################################ + +from module.plugins.Crypter import Crypter +from module.common.json_layer import json_loads + + +class GooGl(Crypter): + __name__ = "GooGl" + __type__ = "crypter" + __pattern__ = r"https?://(www\.)?goo\.gl/\w+" + __version__ = "0.01" + __description__ = """Goo.gl Crypter Plugin""" + __author_name__ = ("stickell") + __author_mail__ = ("l.stickell@yahoo.it") + + API_URL = 'https://www.googleapis.com/urlshortener/v1/url' + + def decrypt(self, pyfile): + rep = self.load(self.API_URL, get={'shortUrl': pyfile.url}) + self.logDebug('JSON data: ' + rep) + rep = json_loads(rep) + + if 'longUrl' in rep: + self.core.files.addLinks([rep['longUrl']], self.pyfile.package().id) + else: + self.fail('Unable to expand shortened link') diff --git a/pyload/plugins/crypter/HoerbuchIn.py b/pyload/plugins/crypter/HoerbuchIn.py new file mode 100644 index 000000000..6f23b2eb9 --- /dev/null +++ b/pyload/plugins/crypter/HoerbuchIn.py @@ -0,0 +1,55 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import re + +from module.plugins.Crypter import Crypter +from module.lib.BeautifulSoup import BeautifulSoup, BeautifulStoneSoup + +class HoerbuchIn(Crypter): + __name__ = "HoerbuchIn" + __type__ = "container" + __pattern__ = r"http://(www\.)?hoerbuch\.in/(wp/horbucher/\d+/.+/|tp/out.php\?.+|protection/folder_\d+\.html)" + __version__ = "0.7" + __description__ = """Hoerbuch.in Container Plugin""" + __author_name__ = ("spoob", "mkaay") + __author_mail__ = ("spoob@pyload.org", "mkaay@mkaay.de") + + article = re.compile("http://(www\.)?hoerbuch\.in/wp/horbucher/\d+/.+/") + protection = re.compile("http://(www\.)?hoerbuch\.in/protection/folder_\d+.html") + + def decrypt(self, pyfile): + self.pyfile = pyfile + + if self.article.match(self.pyfile.url): + src = self.load(self.pyfile.url) + soup = BeautifulSoup(src, convertEntities=BeautifulStoneSoup.HTML_ENTITIES) + + abookname = soup.find("a", attrs={"rel": "bookmark"}).text + for a in soup.findAll("a", attrs={"href": self.protection}): + package = "%s (%s)" % (abookname, a.previousSibling.previousSibling.text[:-1]) + links = self.decryptFolder(a["href"]) + + self.packages.append((package, links, self.pyfile.package().folder)) + else: + links = self.decryptFolder(self.pyfile.url) + + self.packages.append((self.pyfile.package().name, links, self.pyfile.package().folder)) + + def decryptFolder(self, url): + m = self.protection.search(url) + if not m: + self.fail("Bad URL") + url = m.group(0) + + self.pyfile.url = url + src = 
self.req.load(url, post={"viewed": "adpg"}) + + links = [] + pattern = re.compile(r'
    ", src) + + if not len(links) > 0: + self.retry() + + self.correctCaptcha() + + cleanedLinks = [] + for link in links: + if link.startswith("http://dontknow.me/at/?"): + cleanedLink = urllib.unquote(link[23:]) + else: + cleanedLink = link + self.logDebug("Link: %s" % cleanedLink) + cleanedLinks.append(cleanedLink) + + self.logDebug("Decrypted %d links" % len(links)) + + self.pyfile.package().password = "iload.to" + self.packages.append((self.pyfile.package().name, cleanedLinks, self.pyfile.package().folder)) \ No newline at end of file diff --git a/pyload/plugins/crypter/LetitbitNetFolder.py b/pyload/plugins/crypter/LetitbitNetFolder.py new file mode 100644 index 000000000..68aad9dd7 --- /dev/null +++ b/pyload/plugins/crypter/LetitbitNetFolder.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- + +import re +from module.plugins.Crypter import Crypter + + +class LetitbitNetFolder(Crypter): + __name__ = "LetitbitNetFolder" + __type__ = "crypter" + __pattern__ = r"http://(?:www\.)?letitbit.net/folder/\w+" + __version__ = "0.1" + __description__ = """Letitbit.net Folder Plugin""" + __author_name__ = ("DHMH", "z00nx") + __author_mail__ = ("webmaster@pcProfil.de", "z00nx0@gmail.com") + + FOLDER_PATTERN = r'(.*)
    ' + LINK_PATTERN = r'
    ' + + def decrypt(self, pyfile): + html = self.load(self.pyfile.url) + + new_links = [] + + folder = re.search(self.FOLDER_PATTERN, html, re.DOTALL) + if folder is None: + self.fail("Parse error (FOLDER)") + + new_links.extend(re.findall(self.LINK_PATTERN, folder.group(0))) + + if new_links: + self.core.files.addLinks(new_links, self.pyfile.package().id) + else: + self.fail('Could not extract any links') diff --git a/pyload/plugins/crypter/LinkList.py b/pyload/plugins/crypter/LinkList.py new file mode 100644 index 000000000..ebfa373eb --- /dev/null +++ b/pyload/plugins/crypter/LinkList.py @@ -0,0 +1,55 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from module.plugins.Crypter import Crypter, Package + +class LinkList(Crypter): + __name__ = "LinkList" + __version__ = "0.11" + __pattern__ = r".+\.txt$" + __description__ = """Read Link Lists in txt format""" + __author_name__ = ("spoob", "jeix") + __author_mail__ = ("spoob@pyload.org", "jeix@hasnomail.com") + + # method declaration is needed here + def decryptURL(self, url): + return Crypter.decryptURL(self, url) + + def decryptFile(self, content): + links = content.splitlines() + + curPack = "default" + packages = {curPack:[]} + + for link in links: + link = link.strip() + if not link: continue + + if link.startswith(";"): + continue + if link.startswith("[") and link.endswith("]"): + # new package + curPack = link[1:-1] + packages[curPack] = [] + continue + packages[curPack].append(link) + + # empty packages fix + delete = [] + + for key,value in packages.iteritems(): + if not value: + delete.append(key) + + for key in delete: + del packages[key] + + urls = [] + + for name, links in packages.iteritems(): + if name == "default": + urls.extend(links) + else: + urls.append(Package(name, links)) + + return urls \ No newline at end of file diff --git a/pyload/plugins/crypter/LinkSaveIn.py b/pyload/plugins/crypter/LinkSaveIn.py new file mode 100644 index 000000000..e021316bf --- /dev/null +++ b/pyload/plugins/crypter/LinkSaveIn.py @@ -0,0 +1,227 @@ +# -*- coding: utf-8 -*- + +# +# v2.01 - hagg +# * cnl2 and web links are skipped if JS is not available (instead of failing the package) +# * only best available link source is used (priority: cnl2>rsdf>ccf>dlc>web +# + +from Crypto.Cipher import AES +from module.plugins.Crypter import Crypter +from module.unescape import unescape +import base64 +import binascii +import re + +class LinkSaveIn(Crypter): + __name__ = "LinkSaveIn" + __type__ = "crypter" + __pattern__ = r"http://(www\.)?linksave.in/(?P\w+)$" + __version__ = "2.01" + __description__ = """LinkSave.in Crypter Plugin""" + __author_name__ = ("fragonib") + __author_mail__ = ("fragonib[AT]yahoo[DOT]es") + + # Constants + _JK_KEY_ = "jk" + _CRYPTED_KEY_ = "crypted" + HOSTER_DOMAIN = "linksave.in" + + def setup(self): + self.html = None + self.fileid = None + self.captcha = False + self.package = None + self.preferred_sources = ['cnl2', 'rsdf', 'ccf', 'dlc', 'web'] + + def decrypt(self, pyfile): + + # Init + self.package = pyfile.package() + self.fileid = re.match(self.__pattern__, pyfile.url).group('id') + self.req.cj.setCookie(self.HOSTER_DOMAIN, "Linksave_Language", "english") + + # Request package + self.html = self.load(self.pyfile.url) + if not self.isOnline(): + self.offline() + + # Check for protection + if self.isPasswordProtected(): + self.unlockPasswordProtection() + self.handleErrors() + + if self.isCaptchaProtected(): + self.captcha = True + self.unlockCaptchaProtection() + self.handleErrors() + + # Get package name and 
folder + (package_name, folder_name) = self.getPackageInfo() + + # Extract package links + package_links = [] + for type_ in self.preferred_sources: + package_links.extend(self.handleLinkSource(type_)) + if package_links: # use only first source which provides links + break + package_links = set(package_links) + + # Pack + if package_links: + self.packages = [(package_name, package_links, folder_name)] + else: + self.fail('Could not extract any links') + + def isOnline(self): + if "Error 404 - Folder not found!" in self.html: + self.logDebug("File not found") + return False + return True + + def isPasswordProtected(self): + if re.search(r'''Captcha:" in self.html: + self.logDebug("Links are captcha protected") + return True + return False + + def unlockPasswordProtection(self): + password = self.getPassword() + self.logDebug("Submitting password [%s] for protected links" % password) + post = {"id": self.fileid, "besucherpasswort": password, 'login': 'submit'} + self.html = self.load(self.pyfile.url, post=post) + + def unlockCaptchaProtection(self): + captcha_hash = re.search(r'name="hash" value="([^"]+)', self.html).group(1) + captcha_url = re.search(r'src=".(/captcha/cap.php\?hsh=[^"]+)', self.html).group(1) + captcha_code = self.decryptCaptcha("http://linksave.in" + captcha_url, forceUser=True) + self.html = self.load(self.pyfile.url, post={"id": self.fileid, "hash": captcha_hash, "code": captcha_code}) + + def getPackageInfo(self): + name = self.pyfile.package().name + folder = self.pyfile.package().folder + self.logDebug("Defaulting to pyfile name [%s] and folder [%s] for package" % (name, folder)) + return name, folder + + def handleErrors(self): + if "The visitorpassword you have entered is wrong" in self.html: + self.logDebug("Incorrect password, please set right password on 'Edit package' form and retry") + self.fail("Incorrect password, please set right password on 'Edit package' form and retry") + + if self.captcha: + if "Wrong code. 
Please retry" in self.html: + self.logDebug("Invalid captcha, retrying") + self.invalidCaptcha() + self.retry() + else: + self.correctCaptcha() + + def handleLinkSource(self, type_): + if type_ == 'cnl2': + return self.handleCNL2() + elif type_ in ('rsdf', 'ccf', 'dlc'): + return self.handleContainer(type_) + elif type_ == 'web': + return self.handleWebLinks() + else: + self.fail('unknown source type "%s" (this is probably a bug)' % type_) + + def handleWebLinks(self): + package_links = [] + self.logDebug("Search for Web links") + if not self.js: + self.logDebug("no JS -> skip Web links") + else: + #@TODO: Gather paginated web links + pattern = r'(.*)', response)[-1] + jseval = self.js.eval("document = { write: function(e) { return e; } }; %s" % jscode) + dlLink = re.search(r'http://linksave\.in/dl-\w+', jseval).group(0) + self.logDebug("JsEngine returns value [%s] for redirection link" % dlLink) + response = self.load(dlLink) + link = unescape(re.search(r'') + for pattern in patterns: + rexpr = re.compile(pattern, re.DOTALL) + content = re.sub(rexpr, "", content) + return content + + def removeContainers(self,package_links): + tmp_package_links = package_links[:] + for link in tmp_package_links: + self.logDebug(link) + if ".dlc" in link or ".ccf" in link or ".rsdf" in link: + self.logDebug("Removing [%s] from package_links" % link) + package_links.remove(link) + + if len(package_links) > 0: + return package_links + else: + return tmp_package_links + + def isOnline(self): + if "Your folder does not exist" in self.cleanedHtml: + self.logDebug("File not found") + return False + return True + + def isProtected(self): + if re.search(r'''''', self.cleanedHtml): + self.logDebug("Links are protected") + return True + return False + + def getPackageInfo(self): + title_re = r'

    (?P[^<]+).*?</span>.*?</h2>' + m = re.findall(title_re, self.html, re.DOTALL) + if m is not None: + title = m[-1].strip() + name = folder = title + self.logDebug("Found name [%s] and folder [%s] in package info" % (name, folder)) + else: + name = self.package.name + folder = self.package.folder + self.logDebug("Package info not found, defaulting to pyfile name [%s] and folder [%s]" % (name, folder)) + return name, folder + + def unlockProtection(self): + + postData = {} + + form = re.search(r'''<form\ name="protected"(.*?)</form>''', self.cleanedHtml, re.DOTALL).group(1) + + # Submit package password + if "password" in form: + password = self.getPassword() + self.logDebug("Submitting password [%s] for protected links" % password) + postData['password'] = password + + # Resolve anicaptcha + if "anicaptcha" in form: + self.captcha = True + self.logDebug("Captcha protected, resolving captcha") + captchaUri = re.search(r'src="(/temp/anicaptcha/[^"]+)', form).group(1) + captcha = self.decryptCaptcha("http://ncrypt.in" + captchaUri) + self.logDebug("Captcha resolved [%s]" % captcha) + postData['captcha'] = captcha + + # Resolve recaptcha + if "recaptcha" in form: + self.captcha = True + id = re.search(r'\?k=(.*?)"', form).group(1) + self.logDebug("Resolving ReCaptcha with key [%s]" % id) + recaptcha = ReCaptcha(self) + challenge, code = recaptcha.challenge(id) + postData['recaptcha_challenge_field'] = challenge + postData['recaptcha_response_field'] = code + + # Resolve circlecaptcha + if "circlecaptcha" in form: + self.captcha = True + self.logDebug("Captcha protected") + captcha_img_url = "http://ncrypt.in/classes/captcha/circlecaptcha.php" + coords = self.decryptCaptcha(captcha_img_url, forceUser=True, imgtype="png", result_type='positional') + self.logDebug("Captcha resolved, coords [%s]" % str(coords)) + self.captcha_post_url = self.pyfile.url + + postData['circle.x'] = coords[0] + postData['circle.y'] = coords[1] + + + # Unlock protection + postData['submit_protected'] = 'Continue to folder ' + return self.load(self.pyfile.url, post=postData) + + def handleErrors(self): + + if "This password is invalid!" in self.cleanedHtml: + self.logDebug("Incorrect password, please set right password on 'Edit package' form and retry") + self.fail("Incorrect password, please set right password on 'Edit package' form and retry") + + if self.captcha: + if "The securitycheck was wrong!" 
in self.cleanedHtml: + self.logDebug("Invalid captcha, retrying") + self.invalidCaptcha() + self.retry() + else: + self.correctCaptcha() + + def handleWebLinks(self): + package_links = [] + self.logDebug("Handling Web links") + + pattern = r"(http://ncrypt\.in/link-.*?=)" + links = re.findall(pattern, self.html) + self.logDebug("Decrypting %d Web links" % len(links)) + for i, link in enumerate(links): + self.logDebug("Decrypting Web link %d, %s" % (i+1, link)) + try: + url = link.replace("link-", "frame-") + link = self.load(url, just_header=True)['location'] + package_links.append(link) + except Exception, detail: + self.logDebug("Error decrypting Web link %s, %s" % (link, detail)) + return package_links + + def handleContainers(self): + package_links = [] + self.logDebug("Handling Container links") + + pattern = r"/container/(rsdf|dlc|ccf)/([a-z0-9]+)" + containersLinks = re.findall(pattern, self.html) + self.logDebug("Decrypting %d Container links" % len(containersLinks)) + for containerLink in containersLinks: + link = "http://ncrypt.in/container/%s/%s.%s" % (containerLink[0], containerLink[1], containerLink[0]) + package_links.append(link) + return package_links + + def handleCNL2(self): + package_links = [] + self.logDebug("Handling CNL2 links") + + if 'cnl2_output' in self.cleanedHtml: + try: + (vcrypted, vjk) = self._getCipherParams() + for (crypted, jk) in zip(vcrypted, vjk): + package_links.extend(self._getLinks(crypted, jk)) + except: + self.fail("Unable to decrypt CNL2 links") + return package_links + + def _getCipherParams(self): + + pattern = r'<input.*?name="%s".*?value="(.*?)"' + + # Get jk + jk_re = pattern % NCryptIn._JK_KEY_ + vjk = re.findall(jk_re, self.html) + + # Get crypted + crypted_re = pattern % NCryptIn._CRYPTED_KEY_ + vcrypted = re.findall(crypted_re, self.html) + + # Log and return + self.logDebug("Detected %d crypted blocks" % len(vcrypted)) + return vcrypted, vjk + + def _getLinks(self, crypted, jk): + + # Get key + jreturn = self.js.eval("%s f()" % jk) + self.logDebug("JsEngine returns value [%s]" % jreturn) + key = binascii.unhexlify(jreturn) + + # Decode crypted + crypted = base64.standard_b64decode(crypted) + + # Decrypt + Key = key + IV = key + obj = AES.new(Key, AES.MODE_CBC, IV) + text = obj.decrypt(crypted) + + # Extract links + text = text.replace("\x00", "").replace("\r", "") + links = text.split("\n") + links = filter(lambda x: x != "", links) + + # Log and return + self.logDebug("Block has %d links" % len(links)) + return links diff --git a/pyload/plugins/crypter/NetfolderIn.py b/pyload/plugins/crypter/NetfolderIn.py new file mode 100644 index 000000000..c5c602c27 --- /dev/null +++ b/pyload/plugins/crypter/NetfolderIn.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- + +import re + +from module.plugins.internal.SimpleCrypter import SimpleCrypter + + +class NetfolderIn(SimpleCrypter): + __name__ = "NetfolderIn" + __type__ = "crypter" + __pattern__ = r"http://(?:www\.)?netfolder.in/((?P<id1>\w+)/\w+|folder.php\?folder_id=(?P<id2>\w+))" + __version__ = "0.6" + __description__ = """NetFolder Crypter Plugin""" + __author_name__ = ("RaNaN", "fragonib") + __author_mail__ = ("RaNaN@pyload.org", "fragonib[AT]yahoo[DOT]es") + + TITLE_PATTERN = r'<div class="Text">Inhalt des Ordners <span(.*)>(?P<title>.+)</span></div>' + + def decrypt(self, pyfile): + # Request package + self.html = self.load(pyfile.url) + + # Check for password protection + if self.isPasswordProtected(): + self.html = self.submitPassword() + if self.html is None: + self.fail("Incorrect 
password, please set right password on Add package form and retry") + + # Get package name and folder + (package_name, folder_name) = self.getPackageNameAndFolder() + + # Get package links + package_links = self.getLinks() + + # Set package + self.packages = [(package_name, package_links, folder_name)] + + def isPasswordProtected(self): + + if '<input type="password" name="password"' in self.html: + self.logDebug("Links are password protected") + return True + return False + + def submitPassword(self): + # Gather data + try: + m = re.match(self.__pattern__, self.pyfile.url) + id = max(m.group('id1'), m.group('id2')) + except AttributeError: + self.logDebug("Unable to get package id from url [%s]" % self.pyfile.url) + return + url = "http://netfolder.in/folder.php?folder_id=" + id + password = self.getPassword() + + # Submit package password + post = {'password': password, 'save': 'Absenden'} + self.logDebug("Submitting password [%s] for protected links with id [%s]" % (password, id)) + html = self.load(url, {}, post) + + # Check for invalid password + if '<div class="InPage_Error">' in html: + self.logDebug("Incorrect password, please set right password on Edit package form and retry") + return None + + return html + + def getLinks(self): + links = re.search(r'name="list" value="(.*?)"', self.html).group(1).split(",") + self.logDebug("Package has %d links" % len(links)) + return links diff --git a/pyload/plugins/crypter/OneKhDe.py b/pyload/plugins/crypter/OneKhDe.py new file mode 100644 index 000000000..c77203187 --- /dev/null +++ b/pyload/plugins/crypter/OneKhDe.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import re + +from module.unescape import unescape +from module.plugins.Crypter import Crypter + +class OneKhDe(Crypter): + __name__ = "OneKhDe" + __type__ = "container" + __pattern__ = r"http://(www\.)?1kh.de/f/" + __version__ = "0.1" + __description__ = """1kh.de Container Plugin""" + __author_name__ = ("spoob") + __author_mail__ = ("spoob@pyload.org") + + def __init__(self, parent): + Crypter.__init__(self, parent) + self.parent = parent + self.html = None + + def file_exists(self): + """ returns True or False + """ + return True + + def proceed(self, url, location): + url = self.parent.url + self.html = self.req.load(url) + temp_links = [] + link_ids = re.findall(r"<a id=\"DownloadLink_(\d*)\" href=\"http://1kh.de/", self.html) + for id in link_ids: + new_link = unescape(re.search("width=\"100%\" src=\"(.*)\"></iframe>", self.req.load("http://1kh.de/l/" + id)).group(1)) + temp_links.append(new_link) + self.links = temp_links diff --git a/pyload/plugins/crypter/OronComFolder.py b/pyload/plugins/crypter/OronComFolder.py new file mode 100755 index 000000000..726371966 --- /dev/null +++ b/pyload/plugins/crypter/OronComFolder.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- + +import re + +from module.plugins.Crypter import Crypter + +class OronComFolder(Crypter): + __name__ = "OronComFolder" + __type__ = "crypter" + __pattern__ = r"http://(?:www\.)?oron.com/folder/\w+" + __version__ = "0.2" + __description__ = """Oron.com Folder Plugin""" + __author_name__ = ("DHMH") + __author_mail__ = ("webmaster@pcProfil.de") + + FOLDER_PATTERN = r'<table(?:.*)class="tbl"(?:.*)>(?:.*)<table(?:.*)class="tbl2"(?:.*)>(?P<body>.*)</table>(?:.*)</table>' + LINK_PATTERN = r'<a href="([^"]+)" target="_blank">' + + def decryptURL(self, url): + html = self.load(url) + + new_links = [] + + if 'No such folder exist' in html: + # Don't fail because if there's more than a folder for this 
package
+            # and only one of them fails, no urls at all will be added.
+            self.logWarning("Folder does not exist")
+            return new_links
+
+        folder = re.search(self.FOLDER_PATTERN, html, re.DOTALL)
+        if folder is None:
+            # Don't fail because if there's more than a folder for this package
+            # and only one of them fails, no urls at all will be added.
+            self.logWarning("Parse error (FOLDER)")
+            return new_links
+
+        new_links.extend(re.findall(self.LINK_PATTERN, folder.group(0)))
+
+        if new_links:
+            self.logDebug("Found %d new links" % len(new_links))
+            return new_links
+        else:
+            # Don't fail because if there's more than a folder for this package
+            # and only one of them fails, no urls at all will be added.
+            self.logWarning('Could not extract any links')
+            return new_links
diff --git a/pyload/plugins/crypter/QuickshareCzFolder.py b/pyload/plugins/crypter/QuickshareCzFolder.py
new file mode 100644
index 000000000..6cb049935
--- /dev/null
+++ b/pyload/plugins/crypter/QuickshareCzFolder.py
@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+
+import re
+from module.plugins.Crypter import Crypter
+
+class QuickshareCzFolder(Crypter):
+    __name__ = "QuickshareCzFolder"
+    __type__ = "crypter"
+    __pattern__ = r"http://(www\.)?quickshare.cz/slozka-\d+.*"
+    __version__ = "0.1"
+    __description__ = """Quickshare.cz Folder Plugin"""
+    __author_name__ = ("zoidberg")
+    __author_mail__ = ("zoidberg@mujmail.cz")
+
+    FOLDER_PATTERN = r'<textarea[^>]*>(.*?)</textarea>'
+    LINK_PATTERN = r'(http://www.quickshare.cz/\S+)'
+
+    def decrypt(self, pyfile):
+        html = self.load(self.pyfile.url)
+
+        new_links = []
+        found = re.search(self.FOLDER_PATTERN, html, re.DOTALL)
+        if found is None:
+            self.fail("Parse error (FOLDER)")
+        new_links.extend(re.findall(self.LINK_PATTERN, found.group(1)))
+
+        if new_links:
+            self.core.files.addLinks(new_links, self.pyfile.package().id)
+        else:
+            self.fail('Could not extract any links')
\ No newline at end of file
diff --git a/pyload/plugins/crypter/RSDF.py b/pyload/plugins/crypter/RSDF.py
new file mode 100644
index 000000000..cbc9864b1
--- /dev/null
+++ b/pyload/plugins/crypter/RSDF.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import base64
+import binascii
+import re
+
+from module.plugins.Crypter import Crypter
+
+class RSDF(Crypter):
+    __name__ = "RSDF"
+    __version__ = "0.21"
+    __pattern__ = r".*\.rsdf"
+    __description__ = """RSDF Container Decode Plugin"""
+    __author_name__ = ("RaNaN", "spoob")
+    __author_mail__ = ("RaNaN@pyload.org", "spoob@pyload.org")
+
+    def decrypt(self, pyfile):
+
+        from Crypto.Cipher import AES
+
+        infile = pyfile.url.replace("\n", "")
+        Key = binascii.unhexlify('8C35192D964DC3182C6F84F3252239EB4A320D2500000000')
+
+        IV = binascii.unhexlify('FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF')
+        IV_Cipher = AES.new(Key, AES.MODE_ECB)
+        IV = IV_Cipher.encrypt(IV)
+
+        obj = AES.new(Key, AES.MODE_CFB, IV)
+
+        rsdf = open(infile, 'r')
+
+        data = rsdf.read()
+        rsdf.close()
+
+        if re.search(r"<title>404 - Not Found", data) is None:
+            data = binascii.unhexlify(''.join(data.split()))
+            data = data.splitlines()
+
+            links = []
+            for link in data:
+                link = base64.b64decode(link)
+                link = obj.decrypt(link)
+                decryptedUrl = link.replace('CCF: ', '')
+                links.append(decryptedUrl)
+
+            self.log.debug("%s: adding package %s with %d links" % (self.__name__, pyfile.package().name, len(links)))
+            self.packages.append((pyfile.package().name, links))
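The RSDF decoder above is easy to exercise outside pyLoad, since the container format is fixed: the file is an ASCII hex dump; once unhexlified, each line is a base64 chunk that is AES-CFB-decrypted with a well-known static key, and the IV is obtained by ECB-encrypting an all-0xFF block with that same key. A standalone sketch of the identical decode path (Python 2 with PyCrypto, matching the plugin; `decode_rsdf` and the sample path are illustrative names):

    import base64
    import binascii

    from Crypto.Cipher import AES

    RSDF_KEY = binascii.unhexlify('8C35192D964DC3182C6F84F3252239EB4A320D2500000000')


    def decode_rsdf(path):
        # hex dump -> raw bytes
        raw = open(path, 'r').read()
        data = binascii.unhexlify(''.join(raw.split()))

        # the IV is the fixed key's ECB encryption of an all-0xFF block
        iv = AES.new(RSDF_KEY, AES.MODE_ECB).encrypt(binascii.unhexlify('FF' * 16))
        cipher = AES.new(RSDF_KEY, AES.MODE_CFB, iv)  # one stateful stream for all lines

        links = []
        for line in data.splitlines():
            links.append(cipher.decrypt(base64.b64decode(line)).replace('CCF: ', ''))
        return links

    # e.g. links = decode_rsdf('/tmp/example.rsdf')

Note that a single CFB cipher object decrypts all lines in sequence, exactly as the plugin's `obj` does; creating a fresh cipher per line would give different output.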
diff --git a/pyload/plugins/crypter/RSLayerCom.py b/pyload/plugins/crypter/RSLayerCom.py
new file mode 100644
index 000000000..6e4266f2e
--- /dev/null
+++ b/pyload/plugins/crypter/RSLayerCom.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import re
+
+from module.plugins.Crypter import Crypter
+from module.lib.BeautifulSoup import BeautifulSoup
+from module.unescape import unescape
+
+class RSLayerCom(Crypter):
+    __name__ = "RSLayerCom"
+    __type__ = "container"
+    __pattern__ = r"http://(www\.)?rs-layer.com/directory-"
+    __config__ = []
+    __version__ = "0.2"
+    __description__ = """RS-Layer.com Container Plugin"""
+    __author_name__ = ("hzpz")
+    __author_mail__ = ("none")
+
+    def decrypt(self, pyfile):
+        url = pyfile.url
+        src = self.req.load(str(url))
+
+        soup = BeautifulSoup(src)
+        captchaTag = soup.find("img", attrs={"id": "captcha_image"})
+        if captchaTag:
+            captchaUrl = "http://rs-layer.com/" + captchaTag["src"]
+            self.logDebug("Captcha URL: %s" % captchaUrl)
+            result = self.decryptCaptcha(str(captchaUrl), imgtype="png")
+            captchaInput = soup.find("input", attrs={"id": "captcha"})
+            self.req.lastUrl = url
+            src = self.req.load(str(url), post={'captcha_input': result, 'image_name': captchaTag["src"]})
+
+        link_ids = re.findall(r"onclick=\"getFile\(\'([0-9]{7}-.{8})\'\);changeBackgroundColor", src)
+
+        if not link_ids:
+            self.retry()
+
+        self.correctCaptcha()
+
+        links = []
+        for id in link_ids:
+            self.logDebug("ID: %s" % id)
+            new_link = unescape(re.search(r"'
+
+
+    def setup(self):
+        self.fileid = None
+        self.package = None
+        self.password = None
+        self.html = None
+        self.captcha = False
+
+    def decrypt(self, pyfile):
+
+        # Init
+        self.initPackage(pyfile)
+
+        # Request package
+        self.requestPackage()
+
+        # Check for online
+        if not self.isOnline():
+            self.offline()
+
+        # Check for protection
+        if self.isPasswordProtected():
+            self.unlockPasswordProtection()
+            self.handleErrors()
+
+        if self.isCaptchaProtected():
+            self.captcha = True
+            self.unlockCaptchaProtection()
+            self.handleErrors()
+
+        # Get package name and folder
+        (package_name, folder_name) = self.getPackageInfo()
+
+        # Extract package links
+        package_links = []
+        for sources in self.PREFERRED_LINK_SOURCES:
+            package_links.extend(self.handleLinkSource(sources))
+            if package_links:  # use only first source which provides links
+                break
+        package_links = set(package_links)
+
+        # Pack
+        if package_links:
+            self.packages = [(package_name, package_links, folder_name)]
+        else:
+            self.fail('Could not extract any links')
+
+    def initPackage(self, pyfile):
+        self.fileid = re.match(self.__pattern__, pyfile.url).group('id')
+        self.package = pyfile.package()
+        self.password = self.getPassword()
+        self.url = pyfile.url
+
+    def requestPackage(self):
+        self.html = self.load(self.url, decode=True)
+
+    def isOnline(self):
+        if self.OFFLINE_TOKEN in self.html:
+            self.logDebug("File not found")
+            return False
+        return True
+
+    def isPasswordProtected(self):
+        if self.PASSWORD_TOKEN in self.html:
+            self.logDebug("Links are password protected")
+            return True
+        return False
+
+    def isCaptchaProtected(self):
+        if self.CAPTCHA_TOKEN in self.html:
+            self.logDebug("Links are captcha protected")
+            return True
+        return False
+
+    def unlockPasswordProtection(self):
+        self.logDebug("Submitting password [%s] for protected links" % self.password)
+        passwd_url = self.PASSWORD_SUBMIT_URL + "?id=%s" % self.fileid
+        passwd_data = {'id': self.fileid, 'password': self.password, 'pw': 'submit'}
+        self.html = self.load(passwd_url, post=passwd_data, decode=True)
+
+    def unlockCaptchaProtection(self):
+        self.logDebug("Request user positional captcha resolving")
+        captcha_img_url =
self.CAPTCHA_IMG_URL + "?id=%s" % self.fileid + coords = self.decryptCaptcha(captcha_img_url, forceUser=True, imgtype="png", result_type='positional') + self.logDebug("Captcha resolved, coords [%s]" % str(coords)) + captcha_post_url = self.CAPTCHA_SUBMIT_URL + "?id=%s" % self.fileid + captcha_post_data = { 'button.x': coords[0], 'button.y': coords[1], 'captcha': 'submit' } + self.html = self.load(captcha_post_url, post=captcha_post_data, decode=True) + + def getPackageInfo(self): + name = folder = None + + # Try to get info from web + m = re.search(self.FILE_TITLE_REGEX, self.html) + if m is not None: + title = m.group(1).strip() + if not self.FILE_NOTITLE in title: + name = folder = title + self.logDebug("Found name [%s] and folder [%s] in package info" % (name, folder)) + + # Fallback to defaults + if not name or not folder: + name = self.package.name + folder = self.package.folder + self.logDebug("Package info not found, defaulting to pyfile name [%s] and folder [%s]" % (name, folder)) + + # Return package info + return name, folder + + def handleErrors(self): + if self.PASSWORD_ERROR_ROKEN in self.html: + msg = "Incorrect password, please set right password on 'Edit package' form and retry" + self.logDebug(msg) + self.fail(msg) + + if self.captcha: + if self.CAPTCHA_ERROR_ROKEN in self.html: + self.logDebug("Invalid captcha, retrying") + self.invalidCaptcha() + self.retry() + else: + self.correctCaptcha() + + def handleLinkSource(self, source): + if source == 'cnl2': + return self.handleCNL2Links() + elif source == 'dlc': + return self.handleDLCLinks() + elif source == 'web': + return self.handleWEBLinks() + else: + self.fail('Unknown source [%s] (this is probably a bug)' % source) + + def handleCNL2Links(self): + self.logDebug("Search for CNL2 links") + package_links = [] + m = re.search(self.CNL2_FORM_REGEX, self.html, re.DOTALL) + if m is not None: + cnl2_form = m.group(1) + try: + (vcrypted, vjk) = self._getCipherParams(cnl2_form) + for (crypted, jk) in zip(vcrypted, vjk): + package_links.extend(self._getLinks(crypted, jk)) + except: + self.logDebug("Unable to decrypt CNL2 links") + return package_links + + def handleDLCLinks(self): + self.logDebug('Search for DLC links') + package_links = [] + m = re.search(self.DLC_LINK_REGEX, self.html) + if m is not None: + container_url = self.DLC_DOWNLOAD_URL + "?id=%s&dlc=1" % self.fileid + self.logDebug("Downloading DLC container link [%s]" % container_url) + try: + dlc = self.load(container_url) + dlc_filename = self.fileid + ".dlc" + dlc_filepath = os.path.join(self.config["general"]["download_folder"], dlc_filename) + f = open(dlc_filepath, "wb") + f.write(dlc) + f.close() + package_links.append(dlc_filepath) + except: + self.logDebug("Unable to download DLC container") + return package_links + + def handleWEBLinks(self): + self.logDebug("Search for WEB links") + package_links = [] + fw_params = re.findall(self.WEB_FORWARD_REGEX, self.html) + self.logDebug("Decrypting %d Web links" % len(fw_params)) + for index, fw_param in enumerate(fw_params): + try: + fw_url = self.WEB_FORWARD_URL + "?%s" % fw_param + self.logDebug("Decrypting Web link %d, %s" % (index+1, fw_url)) + fw_response = self.load(fw_url, decode=True) + dl_link = re.search(self.WEB_LINK_REGEX, fw_response).group('link') + package_links.append(dl_link) + except Exception, detail: + self.logDebug("Error decrypting Web link %s, %s" % (index, detail)) + self.setWait(4) + self.wait() + return package_links + + def _getCipherParams(self, cnl2_form): + + # Get jk + jk_re = 
self.CNL2_FORMINPUT_REGEX % self.CNL2_JK_KEY + vjk = re.findall(jk_re, cnl2_form, re.IGNORECASE) + + # Get crypted + crypted_re = self.CNL2_FORMINPUT_REGEX % RelinkUs.CNL2_CRYPTED_KEY + vcrypted = re.findall(crypted_re, cnl2_form, re.IGNORECASE) + + # Log and return + self.logDebug("Detected %d crypted blocks" % len(vcrypted)) + return vcrypted, vjk + + def _getLinks(self, crypted, jk): + + # Get key + jreturn = self.js.eval("%s f()" % jk) + self.logDebug("JsEngine returns value [%s]" % jreturn) + key = binascii.unhexlify(jreturn) + + # Decode crypted + crypted = base64.standard_b64decode(crypted) + + # Decrypt + Key = key + IV = key + obj = AES.new(Key, AES.MODE_CBC, IV) + text = obj.decrypt(crypted) + + # Extract links + text = text.replace("\x00", "").replace("\r", "") + links = text.split("\n") + links = filter(lambda x: x != "", links) + + # Log and return + self.logDebug("Package has %d links" % len(links)) + return links diff --git a/pyload/plugins/crypter/SecuredIn.py b/pyload/plugins/crypter/SecuredIn.py new file mode 100644 index 000000000..e41896c5f --- /dev/null +++ b/pyload/plugins/crypter/SecuredIn.py @@ -0,0 +1,334 @@ +# -*- coding: utf-8 -*- + +import re + +from module.plugins.Crypter import Crypter +from module.lib.BeautifulSoup import BeautifulSoup + +from math import ceil + +class SecuredIn(Crypter): + __name__ = "SecuredIn" + __type__ = "container" + __pattern__ = r"http://[\w\.]*?secured\.in/download-[\d]+-[\w]{8}\.html" + __version__ = "0.1" + __description__ = """secured.in Container Plugin""" + __author_name__ = ("mkaay") + __author_mail__ = ("mkaay@mkaay.de") + + def __init__(self, parent): + Crypter.__init__(self, parent) + self.parent = parent + self.html = None + self.multi_dl = False + + def file_exists(self): + return True + + def proceed(self, url, location): + links = [] + ajaxUrl = "http://secured.in/ajax-handler.php" + src = self.req.load(url, cookies=True) + soup = BeautifulSoup(src) + img = soup.find("img", attrs={"id":"captcha_img"}) + for i in range(3): + form = soup.find("form", attrs={"id":"frm_captcha"}) + captchaHash = form.find("input", attrs={"id":"captcha_hash"})["value"] + captchaUrl = "http://secured.in/%s" % img["src"] + captchaData = self.req.load(str(captchaUrl), cookies=True) + result = self.waitForCaptcha(captchaData, "jpg") + src = self.req.load(url, cookies=True, post={"captcha_key":result, "captcha_hash":captchaHash}) + soup = BeautifulSoup(src) + img = soup.find("img", attrs={"id":"captcha_img"}) + if not img: + files = soup.findAll("tr", attrs={"id":re.compile("file-\d+")}) + dlIDPattern = re.compile("accessDownload\(\d, \d+, '(.*?)', \d\)") + cypher = self.Cypher() + for cfile in files: + m = dlIDPattern.search(cfile["onclick"]) + if m: + crypted = self.req.load(ajaxUrl, cookies=True, post={"cmd":"download", "download_id":m.group(1)}) + cypher.reset() + link = cypher.cypher(crypted) + links.append(link) + break + self.links = links + + class Cypher(): + def __init__(self): + self.reset() + + def reset(self): + self.iatwbfrd = [ + 0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7, 0xb8e1afed, 0x6a267e96, 0xba7c9045, 0xf12c7f99, 0x24a19947, 0xb3916cf7, 0x0801f2e2, + 0x858efc16, 0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e, 0x0d95748f, 0x728eb658, 0x718bcd58, 0x82154aee, 0x7b54a41d, 0xc25a59b5, + 0x9c30d539, 0x2af26013, 0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef, 0x8e79dcb0, 0x603a180e, 0x6c9e0e8b, 0xb01e8a3e, 0xd71577c1, + 0xbd314b27, 0x78af2fda, 0x55605c60, 0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440, 0x55ca396a, 0x2aab10b6, 
0xb4cc5c34, 0x1141e8ce, + 0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a, 0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e, 0xafd6ba33, 0x6c24cf5c, 0x7a325381, + 0x28958677, 0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193, 0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032, 0xef845d5d, 0xe98575b1, + 0xdc262302, 0xeb651b88, 0x23893e81, 0xd396acc5, 0x0f6d6ff3, 0x83f44239, 0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e, 0x21c66842, + 0xf6e96c9a, 0x670c9c61, 0xabd388f0, 0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3, 0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98, + 0xa1f1651d, 0x39af0176, 0x66ca593e, 0x82430e88, 0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe, 0xe06f75d8, 0x85c12073, 0x401a449f, + 0x56c16aa6, 0x4ed3aa62, 0x363f7706, 0x1bfedf72, 0x429b023d, 0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b, 0x075372c9, 0x80991b7b, + 0x25d479d8, 0xf6e8def7, 0xe3fe501a, 0xb6794c3b, 0x976ce0bd, 0x04c006ba, 0xc1a94fb6, 0x409f60c4, 0x5e5c9ec2, 0x196a2463, 0x68fb6faf, + 0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f, 0x6dfc511f, 0x9b30952c, 0xcc814544, 0xaf5ebd09, 0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3, + 0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb, 0x5579c0bd, 0x1a60320a, 0xd6a100c6, 0x402c7279, 0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, + 0xdb3222f8, 0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab, 0x323db5fa, 0xfd238760, 0x53317b48, 0x3e00df82, 0x9e5c57bb, 0xca6f8ca0, + 0x1a87562e, 0xdf1769db, 0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573, 0x695b27b0, 0xbbca58c8, 0xe1ffa35d, 0xb8f011a0, 0x10fa3d98, + 0xfd2183b8, 0x4afcb56c, 0x2dd1d35b, 0x9a53e479, 0xb6f84565, 0xd28e49bc, 0x4bfb9790, 0xe1ddf2da, 0xa4cb7e33, 0x62fb1341, 0xcee4c6e8, + 0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4, 0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0, 0xd08ed1d0, 0xafc725e0, 0x8e3c5b2f, + 0x8e7594b7, 0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c, 0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad, 0x2f2f2218, 0xbe0e1777, + 0xea752dfe, 0x8b021fa1, 0xe5a0cc0f, 0xb56f74e8, 0x18acf3d6, 0xce89e299, 0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9, 0x165fa266, + 0x80957705, 0x93cc7314, 0x211a1477, 0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf, 0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49, + 0x00250e2d, 0x2071b35e, 0x226800bb, 0x57b8e0af, 0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa, 0x78c14389, 0xd95a537f, 0x207d5ba2, + 0x02e5b9c5, 0x83260376, 0x6295cfa9, 0x11c81968, 0x4e734a41, 0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915, 0xd60f573f, 0xbc9bc6e4, + 0x2b60a476, 0x81e67400, 0x08ba6fb5, 0x571be91f, 0xf296ec6b, 0x2a0dd915, 0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664, 0x53b02d5d, + 0xa99f8fa1, 0x08ba4799, 0x6e85076a + ] + + self.olkemfjq = [ + 0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344, 0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89, 0x452821e6, 0x38d01377, 0xbe5466cf, + 0x34e90c6c, 0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917, 0x9216d5d9, 0x8979fb1b + ] + + self.oqlaoymh = 0 + self.oqmykrna = 0 + self.pqmyzkid = 0 + self.pldmjnde = 0 + self.ldiwkqly = 0 + + self.plkodnyq = [ + 0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b, 0x5cb0679e, 0x4fa33742, 0xd3822740, 0x99bc9bbe, 0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, + 0xc700c47b, 0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4, 0x5748ab2f, 0xbc946e79, 0xc6a376d2, 0x6549c2c8, 0x530ff8ee, 0x468dde7d, + 0xd5730a1d, 0x4cd04dc6, 0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304, 0xa1fad5f0, 0x6a2d519a, 0x63ef8ce2, 0x9a86ee22, 0xc089c2b8, + 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4, 0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6, 0x2826a2f9, 0xa73a3ae1, 0x4ba99586, 0xef5562e9, + 0xc72fefd3, 0xf752f7da, 0x3f046f69, 
0x77fa0a59, 0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593, 0xe990fd5a, 0x9e34d797, 0x2cf0b7d9, + 0x022b8b51, 0x96d5ac3a, 0x017da67d, 0xd1cf3ed6, 0x7c7d2d28, 0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c, 0xe029ac71, 0xe019a5e6, + 0x47b0acfd, 0xed93fa9b, 0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28, 0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c, 0x15056dd4, + 0x88f46dba, 0x03a16125, 0x0564f0bd, 0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a, 0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319, + 0x7533d928, 0xb155fdf5, 0x03563482, 0x8aba3cbb, 0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f, 0x4de81751, 0x3830dc8e, 0x379d5862, + 0x9320f991, 0xea7a90c2, 0xfb3e7bce, 0x5121ce64, 0x774fbe32, 0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680, 0xa2ae0810, 0xdd6db224, + 0x69852dfd, 0x09072166, 0xb39a460a, 0x6445c0dd, 0x586cdecf, 0x1c20c8ae, 0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb, 0xdda26a7e, + 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5, 0x72eacea8, 0xfa6484bb, 0x8d6612ae, 0xbf3c6f47, 0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370, + 0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d, 0x4040cb08, 0x4eb4e2cc, 0x34d2466a, 0x0115af84, 0xe1b00428, 0x95983a1d, 0x06b89fb4, + 0xce6ea048, 0x6f3f3b82, 0x3520ab82, 0x011a1d4b, 0x277227f8, 0x611560b1, 0xe7933fdc, 0xbb3a792b, 0x344525bd, 0xa08839e1, 0x51ce794b, + 0x2f32c9b7, 0xa01fbac9, 0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7, 0x1a908749, 0xd44fbd9a, 0xd0dadecb, 0xd50ada38, 0x0339c32a, + 0xc6913667, 0x8df9317c, 0xe0b12b4f, 0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c, 0xbf97222c, 0x15e6fc2a, 0x0f91fc71, 0x9b941525, + 0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1, 0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442, 0xe0ec6e0e, 0x1698db3b, 0x4c98a0be, + 0x3278e964, 0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e, 0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8, 0xdf359f8d, 0x9b992f2e, + 0xe60b6f47, 0x0fe3f11d, 0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f, 0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299, 0xf523f357, + 0xa6327623, 0x93a83531, 0x56cccd02, 0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc, 0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614, + 0xe6c6c7bd, 0x327a140a, 0x45e1d006, 0xc3f27b9a, 0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6, 0x71126905, 0xb2040222, 0xb6cbcf7c, + 0xcd769c2b, 0x53113ec0, 0x1640e3d3, 0x38abbd60, 0x2547adf0, 0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060, 0x85cbfe4e, 0x8ae88dd8, + 0x7aaaf9b0, 0x4cf9aa7e, 0x1948c25c, 0x02fb8a8c, 0x01c36ae4, 0xd6ebe1f9, 0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f, 0xb74e6132, + 0xce77e25b, 0x578fdfe3, 0x3ac372e6 + ] + + self.pnjzokye = None + + self.thdlpsmy = [ + 0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934, 0x411520f7, 0x7602d4f7, 0xbcf46b2e, 0xd4a20068, 0xd4082471, 0x3320f46a, 0x43b7d4b7, + 0x500061af, 0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840, 0x4d95fc1d, 0x96b591af, 0x70f4ddd3, 0x66a02f45, 0xbfbc09ec, 0x03bd9785, + 0x7fac6dd0, 0x31cb8504, 0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a, 0x28507825, 0x530429f4, 0x0a2c86da, 0xe9b66dfb, 0x68dc1462, + 0xd7486900, 0x680ec0a4, 0x27a18dee, 0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6, 0xaace1e7c, 0xd3375fec, 0xce78a399, 0x406b2a42, + 0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b, 0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2, 0x3a6efa74, 0xdd5b4332, 0x6841e7f7, + 0xca7820fb, 0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527, 0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b, 0x55a867bc, 0xa1159a58, + 0xcca92963, 0x99e1db33, 0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c, 0xfdf8e802, 0x04272f70, 0x80bb155c, 0x05282ce3, 0x95c11548, + 0xe4c66d22, 
0x48c1133f, 0xc70f86dc, 0x07f9c9ee, 0x41041f0f, 0x404779a4, 0x5d886e17, 0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564, + 0x257b7834, 0x602a9c60, 0xdff8e8a3, 0x1f636c1b, 0x0e12b4c2, 0x02e1329e, 0xaf664fd1, 0xcad18115, 0x6b2395e0, 0x333e92e1, 0x3b240b62, + 0xeebeb922, 0x85b2a20e, 0xe6ba0d99, 0xde720c8c, 0x2da2f728, 0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0, 0x5449a36f, 0x877d48fa, + 0xc39dfd27, 0xf33e8d1e, 0x0a476341, 0x992eff74, 0x3a6f6eab, 0xf4f8fd37, 0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d, 0xc67b5510, + 0x6d672c37, 0x2765d43b, 0xdcd0e804, 0xf1290dc7, 0xcc00ffa3, 0xb5390f92, 0x690fed0b, 0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3, + 0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb, 0x37392eb3, 0xcc115979, 0x8026e297, 0xf42e312d, 0x6842ada7, 0xc66a2b3b, 0x12754ccc, + 0x782ef11c, 0x6a124237, 0xb79251e7, 0x06a1bbe6, 0x4bfb6350, 0x1a6b1018, 0x11caedfa, 0x3d25bdd8, 0xe2e1c3c9, 0x44421659, 0x0a121386, + 0xd90cec6e, 0xd5abea2a, 0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe, 0x9dbc8057, 0xf0f7c086, 0x60787bf8, 0x6003604d, 0xd1fd8346, + 0xf6381fb0, 0x7745ae04, 0xd736fccc, 0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f, 0x77a057be, 0xbde8ae24, 0x55464299, 0xbf582e61, + 0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2, 0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9, 0x7aeb2661, 0x8b1ddf84, 0x846a0e79, + 0x915f95e2, 0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c, 0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e, 0xb77f19b6, 0xe0a9dc09, + 0x662d09a1, 0xc4324633, 0xe85a1f02, 0x09f0be8c, 0x4a99a025, 0x1d6efe10, 0x1ab93d1d, 0x0ba5a4df, 0xa186f20f, 0x2868f169, 0xdcb7da83, + 0x573906fe, 0xa1e2ce9b, 0x4fcd7f52, 0x50115e01, 0xa70683fa, 0xa002b5c4, 0x0de6d027, 0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5, + 0xf0177a28, 0xc0f586e0, 0x006058aa, 0x30dc7d62, 0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634, 0xbbcbee56, 0x90bcb6de, 0xebfc7da1, + 0xce591d76, 0x6f05e409, 0x4b7c0188, 0x39720a3d, 0x7c927c24, 0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc, 0xed545578, 0x08fca5b5, + 0xd83d7cd3, 0x4dad0fc4, 0x1e50ef5e, 0xb161e6f8, 0xa28514d9, 0x6c51133c, 0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837, 0xd79a3234, + 0x92638212, 0x670efa8e, 0x406000e0 + ] + + self.ybghjtik = [ + 0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623, 0xad6ea6b0, 0x49a7df7d, 0x9cee60b8, 0x8fedb266, 0xecaa8c71, 0x699a17ff, 0x5664526c, + 0xc2b19ee1, 0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e, 0x3f54989a, 0x5b429d65, 0x6b8fe4d6, 0x99f73fd6, 0xa1d29c07, 0xefe830f5, + 0x4d2d38e6, 0xf0255dc1, 0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x021ecc5e, 0x09686b3f, 0x3ebaefc9, 0x3c971814, 0x6b6a70a1, 0x687f3584, + 0x52a0e286, 0xb79c5305, 0xaa500737, 0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8, 0xb03ada37, 0xf0500c0d, 0xf01c1f04, 0x0200b3ff, + 0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd, 0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701, 0x3ae5e581, 0x37c2dadc, 0xc8b57634, + 0x9af3dda7, 0xa9446146, 0x0fd0030e, 0xecc8c73e, 0xa4751e41, 0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331, 0x4e548b38, 0x4f6db908, + 0x6f420d03, 0xf60a04bf, 0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af, 0xde9a771f, 0xd9930810, 0xb38bae12, 0xdccf3f2e, 0x5512721f, + 0x2e6b7124, 0x501adde6, 0x9f84cd87, 0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c, 0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2, + 0xef1c1847, 0x3215d908, 0xdd433b37, 0x24c2ba16, 0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd, 0x71dff89e, 0x10314e55, 0x81ac77d6, + 0x5f11199b, 0x043556f1, 0xd7a3c76b, 0x3c11183b, 0x5924a509, 0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e, 0x86e34570, 0xeae96fb1, + 0x860e5e0a, 
0x5a3e2ab3, 0x771fe71c, 0x4e3d06fa, 0x2965dcb9, 0x99e71d0f, 0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a, 0xc6150eba, + 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4, 0xf2f74ea7, 0x361d2b3d, 0x1939260f, 0x19c27960, 0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66, + 0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x018cff28, 0xc332ddef, 0xbe6c5aa5, 0x65582185, 0x68ab9802, 0xeecea50f, 0xdb2f953b, 0x2aef7dad, + 0x5b6e2f84, 0x1521b628, 0x29076170, 0xecdd4775, 0x619f1510, 0x13cca830, 0xeb61bd96, 0x0334fe1e, 0xaa0363cf, 0xb5735c90, 0x4c70a239, + 0xd59e9e0b, 0xcbaade14, 0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e, 0x648b1eaf, 0x19bdf0ca, 0xa02369b9, 0x655abb50, 0x40685a32, + 0x3c2ab4b3, 0x319ee9d5, 0xc021b8f7, 0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8, 0xf837889a, 0x97e32d77, 0x11ed935f, 0x16681281, + 0x0e358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99, 0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696, 0xcdb30aeb, 0x532e3054, 0x8fd948e4, + 0x6dbc3128, 0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73, 0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0, 0x45eee2b6, 0xa3aaabea, + 0xdb6c4f15, 0xfacb4fd0, 0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105, 0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250, 0xcf62a1f2, + 0x5b8d2646, 0xfc8883a0, 0xc1c7b6a3, 0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285, 0x095bbf00, 0xad19489d, 0x1462b174, 0x23820e00, + 0x58428d2a, 0x0c55f5ea, 0x1dadf43e, 0x233f7061, 0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb, 0x7cde3759, 0xcbee7460, 0x4085f2a7, + 0xce77326e, 0xa6078084, 0x19f8509e, 0xe8efd855, 0x61d99735, 0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc, 0x9e447a2e, 0xc3453484, + 0xfdd56705, 0x0e1e9ec9, 0xdb73dbd3, 0x105588cd, 0x675fda79, 0xe3674340, 0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20, 0x153e21e7, + 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7 + ] + + def cypher(self, code): + return self.lskdqpyr(code, "") + + def lskdqpyr(self, alokfmth, yoaksjdh): + if self.pnjzokye is None or self.pnjzokye.lower() == yoaksjdh: + self.yoliukev(yoaksjdh) + self.pnjzokye = yoaksjdh + alokfmth = self.plaomtje(alokfmth) + ykaiumgp = "" + alokijuh = len(alokfmth) + lokimyas = self.ylomiktb(alokfmth[0:8]) + palsiuzt = lokimyas[0] + tzghbndf = lokimyas[1] + awsedrft = [None, None] + for kiujzhqa in range(8, alokijuh, 8): + lokimyas = self.ylomiktb(alokfmth[kiujzhqa:kiujzhqa+8]) + awsedrft[0] = lokimyas[0] + awsedrft[1] = lokimyas[1] + lokimyas = self.okaqnhlp(lokimyas[0], lokimyas[1]) + lokimyas[0] ^= palsiuzt + lokimyas[1] ^= tzghbndf + palsiuzt = awsedrft[0] + tzghbndf = awsedrft[1] + ykaiumgp += self.ykijmtkd(lokimyas) + return ykaiumgp + + def okaqnhlp(self, lahgrnvp, trenlpys): + ujhaqylw = 0 + for yalmhopr in range(17, 1, -1): + lahgrnvp ^= self.ldiwkqly[yalmhopr] + trenlpys ^= (self.oqlaoymh[lahgrnvp >> 24 & 0xff] + self.oqmykrna[lahgrnvp >> 16 & 0xff] ^ self.pqmyzkid[lahgrnvp >> 8 & 0xff]) + self.pldmjnde[lahgrnvp & 0xff] + ujhaqylw = lahgrnvp + lahgrnvp = trenlpys + trenlpys = ujhaqylw + ujhaqylw = lahgrnvp + lahgrnvp = trenlpys + trenlpys = ujhaqylw + trenlpys ^= self.ldiwkqly[1] + lahgrnvp ^= self.ldiwkqly[0] + return [lahgrnvp, trenlpys] + + def plaomtje(self, yoiumqpy): + qkailkzt = "" + xoliuzem = 0 + lyomiujt = 0 + yploemju = -1 + for i in range(0, len(yoiumqpy)): + yploamzu = ord(yoiumqpy[i]) + if ord('A') <= yploamzu <= ord('Z'): + xoliuzem = ord(yoiumqpy[i]) - 65 + elif ord('a') <= yploamzu <= ord('z'): + xoliuzem = ord(yoiumqpy[i]) - 97 + 26 + elif ord('0') <= yploamzu <= ord('9'): + xoliuzem = ord(yoiumqpy[i]) - 48 + 52 + elif yploamzu == ord('+'): + xoliuzem = 62 + elif yploamzu == ord('/'): + 
xoliuzem = 63 + else: + continue + yploemju += 1 + + lxkdmizj = 0 + switch = yploemju % 4 + if switch == 0: + lyomiujt = xoliuzem + continue + elif switch == 1: + lxkdmizj = lyomiujt << 2 | xoliuzem >> 4 + lyomiujt = xoliuzem & 0x0F + elif switch == 2: + lxkdmizj = lyomiujt << 4 | xoliuzem >> 2 + lyomiujt = xoliuzem & 0x03 + elif switch == 3: + lxkdmizj = lyomiujt << 6 | xoliuzem >> 0 + lyomiujt = xoliuzem & 0x00 + qkailkzt += unichr(lxkdmizj) + return qkailkzt + + def qmyjuila(self, oqlamykt, yalkionj): + dolizmvw = 0 + for iumswkya in range(0, 16): + oqlamykt ^= self.ldiwkqly[iumswkya] + yalkionj ^= (self.oqlaoymh[oqlamykt >> 24 & 0xff] + self.oqmykrna[oqlamykt >> 16 & 0xff] ^ self.pqmyzkid[oqlamykt >> 8 & 0xff]) + self.pldmjnde[oqlamykt & 0xff] + dolizmvw = oqlamykt + oqlamykt = yalkionj + yalkionj = dolizmvw + dolizmvw = oqlamykt + oqlamykt = yalkionj + yalkionj = dolizmvw + yalkionj ^= self.ldiwkqly[16] + oqlamykt ^= self.ldiwkqly[17] + return [oqlamykt, yalkionj] + + def ykijmtkd(self, yoirlkqw): + loipamyu = len(yoirlkqw) + yoirlkqwchar = [] + for ymujtnbq in range(0, loipamyu): + yoir = [yoirlkqw[ymujtnbq] >> 24 & 0xff, yoirlkqw[ymujtnbq] >> 16 & 0xff, yoirlkqw[ymujtnbq] >> 8 & 0xff, yoirlkqw[ymujtnbq] & 0xff] + for c in yoir: + yoirlkqwchar.append(chr(c)) + return "".join(yoirlkqwchar) + + def ylomiktb(self, lofiuzmq): + plokimqw = int(ceil(len(lofiuzmq) / 4.0)) + lopkisdq = [] + for ypoqlktz in range(0, plokimqw): + lopkisdq.append(ord(lofiuzmq[(ypoqlktz << 2) + 3]) + (ord(lofiuzmq[(ypoqlktz << 2) + 2]) << 8) + (ord(lofiuzmq[(ypoqlktz << 2) + 1]) << 16) + (ord(lofiuzmq[(ypoqlktz << 2)]) << 24)) + return lopkisdq + + def yoliukev(self, kaiumylq): + self.oqlaoymh = self.iatwbfrd + self.oqmykrna = self.ybghjtik + self.pqmyzkid = self.thdlpsmy + self.pldmjnde = self.plkodnyq + + yaqpolft = [0 for i in range(len(kaiumylq))] + + yaqwsedr = 0 + btzqwsay = 0 + while yaqwsedr < len(kaiumylq): + wlqoakmy = 0 + for lopiuztr in range(0, 4): + wlqoakmy = wlqoakmy << 8 | ord(kaiumylq[yaqwsedr % len(kaiumylq)]) + yaqwsedr += 1 + yaqpolft[btzqwsay] = wlqoakmy + btzqwsay += 1 + self.ldiwkqly = [] + for btzqwsay in range(0, 18): + self.ldiwkqly.append(self.olkemfjq[btzqwsay]) + yalopiuq = [0, 0] + for btzqwsay in range(0, 18, 2): + yalopiuq = self.qmyjuila(yalopiuq[0], yalopiuq[1]) + self.ldiwkqly[btzqwsay] = yalopiuq[0] + self.ldiwkqly[btzqwsay + 1] = yalopiuq[1] + for btzqwsay in range(0, 256, 2): + yalopiuq = self.qmyjuila(yalopiuq[0], yalopiuq[1]) + self.oqlaoymh[btzqwsay] = yalopiuq[0] + self.oqlaoymh[btzqwsay + 1] = yalopiuq[1] + for btzqwsay in range(0, 256, 2): + yalopiuq = self.qmyjuila(yalopiuq[0], yalopiuq[1]) + self.oqmykrna[btzqwsay] = yalopiuq[0] + self.oqmykrna[btzqwsay + 1] = yalopiuq[1] + for btzqwsay in range(0, 256, 2): + yalopiuq = self.qmyjuila(yalopiuq[0], yalopiuq[1]) + self.pqmyzkid[btzqwsay] = yalopiuq[0] + self.pqmyzkid[btzqwsay + 1] = yalopiuq[1] + for btzqwsay in range(0, 256, 2): + yalopiuq = self.qmyjuila(yalopiuq[0], yalopiuq[1]) + self.pldmjnde[btzqwsay] = yalopiuq[0] + self.pldmjnde[btzqwsay + 1] = yalopiuq[1] + diff --git a/pyload/plugins/crypter/SerienjunkiesOrg.py b/pyload/plugins/crypter/SerienjunkiesOrg.py new file mode 100644 index 000000000..3fcc12e36 --- /dev/null +++ b/pyload/plugins/crypter/SerienjunkiesOrg.py @@ -0,0 +1,321 @@ +# -*- coding: utf-8 -*- + +import re +from time import sleep +import random +from module.plugins.Crypter import Crypter +from module.lib.BeautifulSoup import BeautifulSoup +from module.unescape import unescape + +class 
SerienjunkiesOrg(Crypter):
+    __name__ = "SerienjunkiesOrg"
+    __type__ = "container"
+    __pattern__ = r"http://.*?(serienjunkies.org|dokujunkies.org)/.*?"
+    __version__ = "0.38"
+    __config__ = [
+        ("changeNameSJ", "Packagename;Show;Season;Format;Episode", "Take SJ.org name", "Show"),
+        ("changeNameDJ", "Packagename;Show;Format;Episode", "Take DJ.org name", "Show"),
+        ("randomPreferred", "bool", "Randomize Preferred-List", False),
+        ("hosterListMode", "OnlyOne;OnlyPreferred(One);OnlyPreferred(All);All", "Use for hosters (if supported)", "All"),
+        ("hosterList", "str", "Preferred Hoster list (comma separated)", "RapidshareCom,UploadedTo,NetloadIn,FilefactoryCom,FreakshareNet,FilebaseTo,HotfileCom,DepositfilesCom,EasyshareCom,KickloadCom"),
+        ("ignoreList", "str", "Ignored Hoster list (comma separated)", "MegauploadCom")
+    ]
+    __description__ = """serienjunkies.org Container Plugin"""
+    __author_name__ = ("mkaay", "godofdream")
+    __author_mail__ = ("mkaay@mkaay.de", "soilfiction@gmail.com")
+
+    def setup(self):
+        self.multiDL = False
+
+    def getSJSrc(self, url):
+        src = self.req.load(str(url))
+        if "This website is not available in your country" in src:
+            self.fail("Not available in your country")
+        if not src.find("Enter Serienjunkies") == -1:
+            sleep(1)
+            src = self.req.load(str(url))
+        return src
+
+    def handleShow(self, url):
+        src = self.getSJSrc(url)
+        soup = BeautifulSoup(src)
+        packageName = self.pyfile.package().name
+        if self.getConfig("changeNameSJ") == "Show":
+            found = unescape(soup.find("h2").find("a").string.split(' –')[0])
+            if found:
+                packageName = found
+
+        nav = soup.find("div", attrs={"id": "scb"})
+
+        package_links = []
+        for a in nav.findAll("a"):
+            if self.getConfig("changeNameSJ") == "Show":
+                package_links.append(a["href"])
+            else:
+                package_links.append(a["href"] + "#hasName")
+        if self.getConfig("changeNameSJ") == "Show":
+            self.packages.append((packageName, package_links, packageName))
+        else:
+            self.core.files.addLinks(package_links, self.pyfile.package().id)
+
+    def handleSeason(self, url):
+        src = self.getSJSrc(url)
+        soup = BeautifulSoup(src)
+        post = soup.find("div", attrs={"class": "post-content"})
+        ps = post.findAll("p")
+
+        seasonName = unescape(soup.find("a", attrs={"rel": "bookmark"}).string).replace("–", "-")
+        groups = {}
+        gid = -1
+        for p in ps:
+            if re.search("Sprache|Format", str(p)):
+                var = p.findAll("strong")
+                opts = {"Sprache": "", "Format": ""}
+                for v in var:
+                    n = unescape(v.string).strip()
+                    n = re.sub(r"^([:]?)(.*?)([:]?)$", r'\2', n)
+                    if n.strip() not in opts:
+                        continue
+                    val = v.nextSibling
+                    if not val:
+                        continue
+                    val = val.replace("|", "").strip()
+                    val = re.sub(r"^([:]?)(.*?)([:]?)$", r'\2', val)
+                    opts[n.strip()] = val.strip()
+                gid += 1
+                groups[gid] = {}
+                groups[gid]["ep"] = {}
+                groups[gid]["opts"] = opts
+            elif re.search("Download:", str(p)):
+                parts = str(p).split("<br />")
+                if re.search("<strong>", parts[0]):
+                    ename = re.search('<strong>(.*?)</strong>', parts[0]).group(1).strip().decode("utf-8").replace("–", "-")
+                    groups[gid]["ep"][ename] = {}
+                    parts.remove(parts[0])
+                    for part in parts:
+                        hostername = re.search(" \| ([-a-zA-Z0-9]+\.\w+)", part)
+                        if hostername:
+                            hostername = hostername.group(1)
+                            groups[gid]["ep"][ename][hostername] = []
+                            links = re.findall('href="(.*?)"', part)
+                            for link in links:
+                                groups[gid]["ep"][ename][hostername].append(link + "#hasName")
+
+        links = []
+        for g in groups.values():
+            for ename in g["ep"]:
+                links.extend(self.getpreferred(g["ep"][ename]))
+                if self.getConfig("changeNameSJ") == "Episode":
+                    self.packages.append((ename, links, ename))
+                    links = []
+            package = "%s (%s, %s)" % (seasonName, g["opts"]["Format"], g["opts"]["Sprache"])
+            if self.getConfig("changeNameSJ") == "Format":
+                self.packages.append((package, links, package))
+                links = []
+        if (self.getConfig("changeNameSJ") == "Packagename") or re.search("#hasName", url):
+            self.core.files.addLinks(links, self.pyfile.package().id)
+        elif (self.getConfig("changeNameSJ") == "Season") or not re.search("#hasName", url):
+            self.packages.append((seasonName, links, seasonName))
+
+    def handleEpisode(self, url):
+        src = self.getSJSrc(url)
+        if not src.find(
+                "Du hast das Download-Limit überschritten! Bitte versuche es später nocheinmal.") == -1:
+            self.fail(_("Downloadlimit reached"))
+        else:
+            soup = BeautifulSoup(src)
+            form = soup.find("form")
+            h1 = soup.find("h1")
+
+            if h1.get("class") == "wrap":
+                captchaTag = soup.find(attrs={"src": re.compile("^/secure/")})
+                if not captchaTag:
+                    sleep(5)
+                    self.retry()
+
+                captchaUrl = "http://download.serienjunkies.org" + captchaTag["src"]
+                result = self.decryptCaptcha(str(captchaUrl), imgtype="png")
+                sinp = form.find(attrs={"name": "s"})
+
+                self.req.lastURL = str(url)
+                sj = self.load(str(url), post={'s': sinp["value"], 'c': result, 'action': "Download"})
+
+                soup = BeautifulSoup(sj)
+                rawLinks = soup.findAll(attrs={"action": re.compile("^http://download.serienjunkies.org/")})
+
+                if not rawLinks:
+                    sleep(1)
+                    self.retry()
+                    return
+
+                self.correctCaptcha()
+
+                links = []
+                for link in rawLinks:
+                    frameUrl = link["action"].replace("/go-", "/frame/go-")
+                    links.append(self.handleFrame(frameUrl))
+                if re.search("#hasName", url) or ((self.getConfig("changeNameSJ") == "Packagename") and (self.getConfig("changeNameDJ") == "Packagename")):
+                    self.core.files.addLinks(links, self.pyfile.package().id)
+                else:
+                    if h1.text[2] == "_":
+                        eName = h1.text[3:]
+                    else:
+                        eName = h1.text
+                    self.packages.append((eName, links, eName))
+
+    def handleOldStyleLink(self, url):
+        sj = self.req.load(str(url))
+        soup = BeautifulSoup(sj)
+        form = soup.find("form", attrs={"action": re.compile("^http://serienjunkies.org")})
+        captchaTag = form.find(attrs={"src": re.compile("^/safe/secure/")})
+        captchaUrl = "http://serienjunkies.org" + captchaTag["src"]
+        result = self.decryptCaptcha(str(captchaUrl))
+        url = form["action"]
+        sinp = form.find(attrs={"name": "s"})
+
+        self.req.load(str(url), post={'s': sinp["value"], 'c': result, 'dl.start': "Download"}, cookies=False,
+                      just_header=True)
+        decrypted = self.req.lastEffectiveURL
+        if decrypted == str(url):
+            self.retry()
+        self.core.files.addLinks([decrypted], self.pyfile.package().id)
+
+    def handleFrame(self, url):
+        self.req.load(str(url))
+        return self.req.lastEffectiveURL
"page_post"}) + ps = post.findAll("p") + found = unescape(soup.find("h2").find("a").string.split(' –')[0]) + if found: + seasonName = found + + groups = {} + gid = -1 + for p in ps: + if re.search("Sprache|Format", str(p)): + var = p.findAll("strong") + opts = {"Sprache": "", "Format": ""} + for v in var: + n = unescape(v.string).strip() + n = re.sub(r"^([:]?)(.*?)([:]?)$", r'\2', n) + if n.strip() not in opts: + continue + val = v.nextSibling + if not val: + continue + val = val.replace("|", "").strip() + val = re.sub(r"^([:]?)(.*?)([:]?)$", r'\2', val) + opts[n.strip()] = val.strip() + gid += 1 + groups[gid] = {} + groups[gid]["ep"] = {} + groups[gid]["opts"] = opts + elif re.search("Download:", str(p)): + parts = str(p).split("
    ") + if re.search("", parts[0]): + ename = re.search('(.*?)',parts[0]).group(1).strip().decode("utf-8").replace("–", "-") + groups[gid]["ep"][ename] = {} + parts.remove(parts[0]) + for part in parts: + hostername = re.search(" \| ([-a-zA-Z0-9]+\.\w+)",part) + if hostername: + hostername = hostername.group(1) + groups[gid]["ep"][ename][hostername] = [] + links = re.findall('href="(.*?)"',part) + for link in links: + groups[gid]["ep"][ename][hostername].append(link + "#hasName") + + links = [] + for g in groups.values(): + for ename in g["ep"]: + links.extend(self.getpreferred(g["ep"][ename])) + if self.getConfig("changeNameDJ") == "Episode": + self.packages.append((ename, links, ename)) + links = [] + package = "%s (%s, %s)" % (seasonName, g["opts"]["Format"], g["opts"]["Sprache"]) + if self.getConfig("changeNameDJ") == "Format": + self.packages.append((package, links, package)) + links = [] + if (self.getConfig("changeNameDJ") == "Packagename") or re.search("#hasName", url): + self.core.files.addLinks(links, self.pyfile.package().id) + elif (self.getConfig("changeNameDJ") == "Show") or not re.search("#hasName", url): + self.packages.append((seasonName, links, seasonName)) + + + + + + + + def handleCategoryDJ(self, url): + package_links = [] + src = self.getSJSrc(url) + soup = BeautifulSoup(src) + content = soup.find("div", attrs={"id": "content"}) + for a in content.findAll("a", attrs={"rel": "bookmark"}): + package_links.append(a["href"]) + self.core.files.addLinks(package_links, self.pyfile.package().id) + + def decrypt(self, pyfile): + showPattern = re.compile("^http://serienjunkies.org/serie/(.*)/$") + seasonPattern = re.compile("^http://serienjunkies.org/.*?/(.*)/$") + episodePattern = re.compile("^http://download.serienjunkies.org/f-.*?.html(#hasName)?$") + oldStyleLink = re.compile("^http://serienjunkies.org/safe/(.*)$") + categoryPatternDJ = re.compile("^http://dokujunkies.org/.*?(.*)$") + showPatternDJ = re.compile("^http://dokujunkies.org/.*?/(.*)\.html(#hasName)?$") + framePattern = re.compile("^http://download.(serienjunkies.org|dokujunkies.org)/frame/go-.*?/$") + url = pyfile.url + if framePattern.match(url): + self.packages.append((self.pyfile.package().name, [self.handleFrame(url)], self.pyfile.package().name)) + elif episodePattern.match(url): + self.handleEpisode(url) + elif oldStyleLink.match(url): + self.handleOldStyleLink(url) + elif showPattern.match(url): + self.handleShow(url) + elif showPatternDJ.match(url): + self.handleShowDJ(url) + elif seasonPattern.match(url): + self.handleSeason(url) + elif categoryPatternDJ.match(url): + self.handleCategoryDJ(url) + + #selects the preferred hoster, after that selects any hoster (ignoring the one to ignore) + def getpreferred(self, hosterlist): + + result = [] + preferredList = self.getConfig("hosterList").strip().lower().replace('|',',').replace('.','').replace(';',',').split(',') + if (self.getConfig("randomPreferred") == True) and (self.getConfig("hosterListMode") in ["OnlyOne","OnlyPreferred(One)"]) : + random.shuffle(preferredList) + # we don't want hosters be read two times + hosterlist2 = hosterlist.copy() + + for preferred in preferredList: + for Hoster in hosterlist: + if preferred == Hoster.lower().replace('.',''): + for Part in hosterlist[Hoster]: + self.logDebug("selected " + Part) + result.append(str(Part)) + del(hosterlist2[Hoster]) + if (self.getConfig("hosterListMode") in ["OnlyOne","OnlyPreferred(One)"]): + return result + + + ignorelist = 
self.getConfig("ignoreList").strip().lower().replace('|',',').replace('.','').replace(';',',').split(',') + if self.getConfig('hosterListMode') in ["OnlyOne","All"]: + for Hoster in hosterlist2: + if Hoster.strip().lower().replace('.','') not in ignorelist: + for Part in hosterlist2[Hoster]: + self.logDebug("selected2 " + Part) + result.append(str(Part)) + + if self.getConfig('hosterListMode') == "OnlyOne": + return result + return result diff --git a/pyload/plugins/crypter/ShareLinksBiz.py b/pyload/plugins/crypter/ShareLinksBiz.py new file mode 100644 index 000000000..b0e735896 --- /dev/null +++ b/pyload/plugins/crypter/ShareLinksBiz.py @@ -0,0 +1,269 @@ +# -*- coding: utf-8 -*- + +from Crypto.Cipher import AES +from module.plugins.Crypter import Crypter +from module.plugins.ReCaptcha import ReCaptcha +import base64 +import binascii +import re + + +class ShareLinksBiz(Crypter): + __name__ = "ShareLinksBiz" + __type__ = "crypter" + __pattern__ = r"(?Phttp://[\w\.]*?(share-links|s2l)\.biz)/(?P_?[0-9a-z]+)(/.*)?" + __version__ = "1.12" + __description__ = """Share-Links.biz Crypter""" + __author_name__ = ("fragonib") + __author_mail__ = ("fragonib[AT]yahoo[DOT]es") + + + def setup(self): + self.baseUrl = None + self.fileId = None + self.package = None + self.html = None + self.captcha = False + + def decrypt(self, pyfile): + + # Init + self.initFile(pyfile) + + # Request package + url = self.baseUrl + '/' + self.fileId + self.html = self.load(url, decode=True) + + # Unblock server (load all images) + self.unblockServer() + + # Check for protection + if self.isPasswordProtected(): + self.unlockPasswordProtection() + self.handleErrors() + + if self.isCaptchaProtected(): + self.captcha = True + self.unlockCaptchaProtection() + self.handleErrors() + + # Extract package links + package_links = [] + package_links.extend(self.handleWebLinks()) + package_links.extend(self.handleContainers()) + package_links.extend(self.handleCNL2()) + package_links = set(package_links) + + # Get package info + package_name, package_folder = self.getPackageInfo() + + # Pack + self.packages = [(package_name, package_links, package_folder)] + + def initFile(self, pyfile): + url = pyfile.url + if 's2l.biz' in url: + url = self.load(url, just_header=True)['location'] + self.baseUrl = re.search(self.__pattern__, url).group(1) + self.fileId = re.match(self.__pattern__, url).group('id') + self.package = pyfile.package() + + def isOnline(self): + if "No usable content was found" in self.html: + self.logDebug("File not found") + return False + return True + + def isPasswordProtected(self): + if re.search(r'''''', self.html): + self.logDebug("Links are protected") + return True + return False + + def isCaptchaProtected(self): + if '=x1 and x<=x2) and (y>=y1 and y<=y2): + return href + + def handleErrors(self): + if "The inserted password was wrong" in self.html: + self.logDebug("Incorrect password, please set right password on 'Edit package' form and retry") + self.fail("Incorrect password, please set right password on 'Edit package' form and retry") + + if self.captcha: + if "Your choice was wrong" in self.html: + self.logDebug("Invalid captcha, retrying") + self.invalidCaptcha() + self.setWait(5) + self.wait() + self.retry() + else: + self.correctCaptcha() + + def getPackageInfo(self): + name = folder = None + + # Extract from web package header + title_re = r'

<h2><img.*?/>(.*)</h2>'
+        m = re.search(title_re, self.html, re.DOTALL)
+        if m is not None:
+            title = m.group(1).strip()
+            if 'unnamed' not in title:
+                name = folder = title
+                self.logDebug("Found name [%s] and folder [%s] in package info" % (name, folder))
+
+        # Fallback to defaults
+        if not name or not folder:
+            name = self.package.name
+            folder = self.package.folder
+            self.logDebug("Package info not found, defaulting to pyfile name [%s] and folder [%s]" % (name, folder))
+
+        # Return package info
+        return name, folder
+
+    def handleWebLinks(self):
+        package_links = []
+        self.logDebug("Handling Web links")
+
+        #@TODO: Gather paginated web links
+        pattern = r"javascript:_get\('(.*?)', \d+, ''\)"
+        ids = re.findall(pattern, self.html)
+        self.logDebug("Decrypting %d Web links" % len(ids))
+        for i, id in enumerate(ids):
+            try:
+                self.logDebug("Decrypting Web link %d, [%s]" % (i + 1, id))
+                dwLink = self.baseUrl + "/get/lnk/" + id
+                response = self.load(dwLink)
+                code = re.search(r'frm/(\d+)', response).group(1)
+                fwLink = self.baseUrl + "/get/frm/" + code
+                response = self.load(fwLink)
+                jscode = re.search(r'', response, re.DOTALL).group(1)
+                jscode = self.js.eval("f = %s" % jscode)
+                jslauncher = "window=''; parent={frames:{Main:{location:{href:''}}},location:''}; %s; parent.frames.Main.location.href"
+                dlLink = self.js.eval(jslauncher % jscode)
+                self.logDebug("JsEngine returns value [%s] for redirection link" % dlLink)
+                package_links.append(dlLink)
+            except Exception, detail:
+                self.logDebug("Error decrypting Web link [%s], %s" % (id, detail))
+        return package_links
+
+    def handleContainers(self):
+        package_links = []
+        self.logDebug("Handling Container links")
+
+        pattern = r"javascript:_get\('(.*?)', 0, '(rsdf|ccf|dlc)'\)"
+        containersLinks = re.findall(pattern, self.html)
+        self.logDebug("Decrypting %d Container links" % len(containersLinks))
+        for containerLink in containersLinks:
+            link = "%s/get/%s/%s" % (self.baseUrl, containerLink[1], containerLink[0])
+            package_links.append(link)
+        return package_links
+
+    def handleCNL2(self):
+        package_links = []
+        self.logDebug("Handling CNL2 links")
+
+        if '/lib/cnl2/ClicknLoad.swf' in self.html:
+            try:
+                (crypted, jk) = self._getCipherParams()
+                package_links.extend(self._getLinks(crypted, jk))
+            except:
+                self.fail("Unable to decrypt CNL2 links")
+        return package_links
+
+    def _getCipherParams(self):
+
+        # Request CNL2
+        code = re.search(r'ClicknLoad.swf\?code=(.*?)"', self.html).group(1)
+        url = "%s/get/cnl2/%s" % (self.baseUrl, code)
+        response = self.load(url)
+        params = response.split(";;")
+
+        # Get jk
+        strlist = list(base64.standard_b64decode(params[1]))
+        strlist.reverse()
+        jk = ''.join(strlist)
+
+        # Get crypted
+        strlist = list(base64.standard_b64decode(params[2]))
+        strlist.reverse()
+        crypted = ''.join(strlist)
+
+        # Log and return
+        return crypted, jk
+
+    def _getLinks(self, crypted, jk):
+
+        # Get key
+        jreturn = self.js.eval("%s f()" % jk)
+        self.logDebug("JsEngine returns value [%s]" % jreturn)
+        key = binascii.unhexlify(jreturn)
+
+        # Decode crypted
+        crypted = base64.standard_b64decode(crypted)
+
+        # Decrypt
+        Key = key
+        IV = key
+        obj = AES.new(Key, AES.MODE_CBC, IV)
+        text = obj.decrypt(crypted)
+
+        # Extract links
+        text = text.replace("\x00", "").replace("\r", "")
+        links = text.split("\n")
+        links = filter(lambda x: x != "", links)
+
+        # Log and return
+        self.logDebug("Block has %d links" % len(links))
+        return links
\ No newline at end of file
diff --git a/pyload/plugins/crypter/ShareRapidComFolder.py
b/pyload/plugins/crypter/ShareRapidComFolder.py new file mode 100644 index 000000000..cb7f37525 --- /dev/null +++ b/pyload/plugins/crypter/ShareRapidComFolder.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- + +from module.plugins.internal.SimpleCrypter import SimpleCrypter + +class ShareRapidComFolder(SimpleCrypter): + __name__ = "ShareRapidComFolder" + __type__ = "crypter" + __pattern__ = r"http://(?:www\.)?((share(-?rapid\.(biz|com|cz|info|eu|net|org|pl|sk)|-(central|credit|free|net)\.cz|-ms\.net)|(s-?rapid|rapids)\.(cz|sk))|(e-stahuj|mediatack|premium-rapidshare|rapidshare-premium|qiuck)\.cz|kadzet\.com|stahuj-zdarma\.eu|strelci\.net|universal-share\.com)/(slozka/.+)" + __version__ = "0.01" + __description__ = """Share-Rapid.com Folder Plugin""" + __author_name__ = ("zoidberg") + __author_mail__ = ("zoidberg@mujmail.cz") + + LINK_PATTERN = r']*>
    ' \ No newline at end of file diff --git a/pyload/plugins/crypter/SpeedLoadOrgFolder.py b/pyload/plugins/crypter/SpeedLoadOrgFolder.py new file mode 100644 index 000000000..f85ede6f3 --- /dev/null +++ b/pyload/plugins/crypter/SpeedLoadOrgFolder.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- + +############################################################################ +# This program is free software: you can redistribute it and/or modify # +# it under the terms of the GNU Affero General Public License as # +# published by the Free Software Foundation, either version 3 of the # +# License, or (at your option) any later version. # +# # +# This program is distributed in the hope that it will be useful, # +# but WITHOUT ANY WARRANTY; without even the implied warranty of # +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # +# GNU Affero General Public License for more details. # +# # +# You should have received a copy of the GNU Affero General Public License # +# along with this program. If not, see . # +############################################################################ + +from module.plugins.internal.SimpleCrypter import SimpleCrypter + +class SpeedLoadOrgFolder(SimpleCrypter): + __name__ = "SpeedLoadOrgFolder" + __type__ = "crypter" + __pattern__ = r"http://(www\.)?speedload\.org/(\d+~f$|folder/\d+/)" + __version__ = "0.2" + __description__ = """Speedload Crypter Plugin""" + __author_name__ = ("stickell") + __author_mail__ = ("l.stickell@yahoo.it") + + LINK_PATTERN = r'', self.html, re.S).group(1) + self.logDebug(captcha_div) + numerals = re.findall('(\d)', html_unescape(captcha_div)) + inputs['code'] = "".join([a[1] for a in sorted(numerals, key = lambda num: int(num[0]))]) + self.logDebug("CAPTCHA", inputs['code'], numerals) + return 3 + +getInfo = create_getInfo(RarefileNet) diff --git a/pyload/plugins/hoster/RealdebridCom.py b/pyload/plugins/hoster/RealdebridCom.py new file mode 100644 index 000000000..3c796232e --- /dev/null +++ b/pyload/plugins/hoster/RealdebridCom.py @@ -0,0 +1,88 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import re +from time import time +from urllib import quote, unquote +from random import randrange + +from module.utils import parseFileSize, remove_chars +from module.common.json_layer import json_loads +from module.plugins.Hoster import Hoster + +class RealdebridCom(Hoster): + __name__ = "RealdebridCom" + __version__ = "0.49" + __type__ = "hoster" + + __pattern__ = r"https?://.*real-debrid\..*" + __description__ = """Real-Debrid.com hoster plugin""" + __author_name__ = ("Devirex, Hazzard") + __author_mail__ = ("naibaf_11@yahoo.de") + + def getFilename(self, url): + try: + name = unquote(url.rsplit("/", 1)[1]) + except IndexError: + name = "Unknown_Filename..." 
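+ # unquote() undoes the URL encoding, e.g. "http://host/dir/My%20File.rar"
+ # yields "My File.rar"; the IndexError branch only fires for URLs without
+ # any "/" in them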
+ if not name or name.endswith(".."): #incomplete filename, append random stuff + name += "%s.tmp" % randrange(100,999) + return name + + def init(self): + self.tries = 0 + self.chunkLimit = 3 + self.resumeDownload = True + + + def process(self, pyfile): + if not self.account: + self.logError(_("Please enter your Real-debrid account or deactivate this plugin")) + self.fail("No Real-debrid account provided") + + self.log.debug("Real-Debrid: Old URL: %s" % pyfile.url) + if re.match(self.__pattern__, pyfile.url): + new_url = pyfile.url + else: + password = self.getPassword().splitlines() + if not password: password = "" + else: password = password[0] + + url = "http://real-debrid.com/ajax/unrestrict.php?lang=en&link=%s&password=%s&time=%s" % (quote(pyfile.url, ""), password, int(time()*1000)) + page = self.load(url) + data = json_loads(page) + + self.logDebug("Returned Data: %s" % data) + + if data["error"] != 0: + if data["message"] == "Your file is unavailable on the hoster.": + self.offline() + else: + self.logWarning(data["message"]) + self.tempOffline() + else: + if self.pyfile.name is not None and self.pyfile.name.endswith('.tmp') and data["file_name"]: + self.pyfile.name = data["file_name"] + self.pyfile.size = parseFileSize(data["file_size"]) + new_url = data['generated_links'][0][-1] + + if self.getConfig("https"): + new_url = new_url.replace("http://", "https://") + else: + new_url = new_url.replace("https://", "http://") + + self.log.debug("Real-Debrid: New URL: %s" % new_url) + + if pyfile.name.startswith("http") or pyfile.name.startswith("Unknown") or pyfile.name.endswith('..'): + #only use when name wasnt already set + pyfile.name = self.getFilename(new_url) + + self.download(new_url, disposition=True) + + check = self.checkDownload( + {"error": "An error occured while processing your request"}) + + if check == "error": + #usual this download can safely be retried + self.retry(reason="An error occured while generating link.", wait_time=60) + diff --git a/pyload/plugins/hoster/RedtubeCom.py b/pyload/plugins/hoster/RedtubeCom.py new file mode 100644 index 000000000..c2083e679 --- /dev/null +++ b/pyload/plugins/hoster/RedtubeCom.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import re +from module.plugins.Hoster import Hoster +from module.unescape import unescape + +class RedtubeCom(Hoster): + __name__ = "RedtubeCom" + __type__ = "hoster" + __pattern__ = r'http://[\w\.]*?redtube\.com/\d+' + __version__ = "0.2" + __description__ = """Redtube.com Download Hoster""" + __author_name__ = ("jeix") + __author_mail__ = ("jeix@hasnomail.de") + + def process(self, pyfile): + self.download_html() + if not self.file_exists(): + self.offline() + + pyfile.name = self.get_file_name() + self.download(self.get_file_url()) + + def download_html(self): + url = self.pyfile.url + self.html = self.load(url) + + def get_file_url(self): + """ returns the absolute downloadable filepath + """ + if self.html is None: + self.download_html() + + file_url = unescape(re.search(r'hashlink=(http.*?)"', self.html).group(1)) + + return file_url + + def get_file_name(self): + if self.html is None: + self.download_html() + + name = re.search('(.*?)- RedTube - Free Porn Videos', self.html).group(1).strip() + ".flv" + return name + + def file_exists(self): + """ returns True or False + """ + if self.html is None: + self.download_html() + + if re.search(r'This video has been removed.', self.html) is not None: + return False + else: + return True + diff --git a/pyload/plugins/hoster/RehostTo.py 
b/pyload/plugins/hoster/RehostTo.py new file mode 100644 index 000000000..141dcb8c8 --- /dev/null +++ b/pyload/plugins/hoster/RehostTo.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from urllib import quote, unquote +from module.plugins.Hoster import Hoster + +class RehostTo(Hoster): + __name__ = "RehostTo" + __version__ = "0.11" + __type__ = "hoster" + __pattern__ = r"https?://.*rehost.to\..*" + __description__ = """rehost.com hoster plugin""" + __author_name__ = ("RaNaN") + __author_mail__ = ("RaNaN@pyload.org") + + def getFilename(self, url): + return unquote(url.rsplit("/", 1)[1]) + + def setup(self): + self.chunkLimit = 1 + self.resumeDownload = True + + def process(self, pyfile): + if not self.account: + self.log.error(_("Please enter your rehost.to account or deactivate this plugin")) + self.fail("No rehost.to account provided") + + data = self.account.getAccountInfo(self.user) + long_ses = data["long_ses"] + + self.log.debug("Rehost.to: Old URL: %s" % pyfile.url) + new_url = "http://rehost.to/process_download.php?user=cookie&pass=%s&dl=%s" % (long_ses, quote(pyfile.url, "")) + + #raise timeout to 2min + self.req.setOption("timeout", 120) + + self.download(new_url, disposition=True) \ No newline at end of file diff --git a/pyload/plugins/hoster/ReloadCc.py b/pyload/plugins/hoster/ReloadCc.py new file mode 100644 index 000000000..7dc6d9bb6 --- /dev/null +++ b/pyload/plugins/hoster/ReloadCc.py @@ -0,0 +1,103 @@ +from module.plugins.Hoster import Hoster + +from module.common.json_layer import json_loads + +from module.network.HTTPRequest import BadHeader + +class ReloadCc(Hoster): + __name__ = "ReloadCc" + __version__ = "0.4" + __type__ = "hoster" + __description__ = """Reload.Cc hoster plugin""" + + # Since we want to allow the user to specify the list of hoster to use we let MultiHoster.coreReady create the regex patterns for us using getHosters in our ReloadCc hook. + __pattern__ = None + + __author_name__ = ("Reload Team") + __author_mail__ = ("hello@reload.cc") + + def process(self, pyfile): + # Check account + if not self.account or not self.account.canUse(): + self.logError("Please enter a valid reload.cc account or deactivate this plugin") + self.fail("No valid reload.cc account provided") + + # In some cases hostsers do not supply us with a filename at download, so we are going to set a fall back filename (e.g. for freakshare or xfileshare) + self.pyfile.name = self.pyfile.name.split('/').pop() # Remove everthing before last slash + + # Correction for automatic assigned filename: Removing html at end if needed + suffix_to_remove = ["html", "htm", "php", "php3", "asp", "shtm", "shtml", "cfml", "cfm"] + temp = self.pyfile.name.split('.') + if temp.pop() in suffix_to_remove: + self.pyfile.name = ".".join(temp) + + # Get account data + (user, data) = self.account.selectAccount() + + query_params = dict( + via='pyload', + v=1, + user=user, + uri=self.pyfile.url + ) + + try: + query_params.update(dict(hash=self.account.infos[user]['pwdhash'])) + except Exception: + query_params.update(dict(pwd=data['password'])) + + try: + answer = self.load("http://api.reload.cc/dl", get=query_params) + except BadHeader, e: + if e.code == 400: + self.fail("The URI is not supported by Reload.cc.") + elif e.code == 401: + self.fail("Wrong username or password") + elif e.code == 402: + self.fail("Your account is inactive. A payment is required for downloading!") + elif e.code == 403: + self.fail("Your account is disabled. 
Please contact the Reload.cc support!") + elif e.code == 409: + self.logWarning("The hoster seems to be a limited hoster and you've used your daily traffic for this hoster: %s" % self.pyfile.url) + # Wait for 6 hours and retry up to 4 times => one day + self.retry(max_retries=4, wait_time=(3600 * 6), reason="Limited hoster traffic limit exceeded") + elif e.code == 429: + self.retry(max_retries=5, wait_time=120, reason="Too many concurrent connections") # Too many connections, wait 2 minutes and try again + elif e.code == 503: + self.retry(wait_time=600, reason="Reload.cc is currently in maintenance mode! Please check again later.") # Retry in 10 minutes + else: + self.fail("Internal error within Reload.cc. Please contact the Reload.cc support for further information.") + return + + data = json_loads(answer) + + # Check status and decide what to do + status = data.get('status', None) + if status == "ok": + conn_limit = data.get('msg', 0) + # API says these connections are limited + # Make sure this limit is used - the download will fail if not + if conn_limit > 0: + try: + self.limitDL = int(conn_limit) + except ValueError: + self.limitDL = 1 + else: + self.limitDL = 0 + + try: + self.download(data['link'], disposition=True) + except BadHeader, e: + if e.code == 404: + self.fail("File Not Found") + elif e.code == 412: + self.fail("File access password is wrong") + elif e.code == 417: + self.fail("Password required for file access") + elif e.code == 429: + self.retry(max_retries=5, wait_time=120, reason="Too many concurrent connections") # Too many connections, wait 2 minutes and try again + else: + self.fail("Internal error within Reload.cc. Please contact the Reload.cc support for further information.") + return + else: + self.fail("Internal error within Reload.cc. Please contact the Reload.cc support for further information.") diff --git a/pyload/plugins/hoster/RyushareCom.py b/pyload/plugins/hoster/RyushareCom.py new file mode 100644 index 000000000..7bfe4e8fe --- /dev/null +++ b/pyload/plugins/hoster/RyushareCom.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +from module.plugins.hoster.XFileSharingPro import XFileSharingPro, create_getInfo +import re + + +class RyushareCom(XFileSharingPro): + __name__ = "RyushareCom" + __type__ = "hoster" + __pattern__ = r"http://(?:\w*\.)*?ryushare.com/\w{11,}" + __version__ = "0.11" + __description__ = """ryushare.com hoster plugin""" + __author_name__ = ("zoidberg", "stickell") + __author_mail__ = ("zoidberg@mujmail.cz", "l.stickell@yahoo.it") + + HOSTER_NAME = "ryushare.com" + + WAIT_PATTERN = r'(?:You have to|Please) wait (?:(?P\d+) minutes, )?(?:)?(?P\d+)(?:)? seconds' + DIRECT_LINK_PATTERN = r'Click here to download' + + def setup(self): + self.resumeDownload = self.multiDL = True + if not self.premium: + self.limitDL = 2 + # Up to 3 chunks allowed in free downloads. Unknown for premium + self.chunkLimit = 3 + + def getDownloadLink(self): + self.html = self.load(self.pyfile.url) + action, inputs = self.parseHtmlForm(input_names={"op": re.compile("^download")}) + if inputs.has_key('method_premium'): + del inputs['method_premium'] + + self.html = self.load(self.pyfile.url, post = inputs) + action, inputs = self.parseHtmlForm('F1') + + for i in xrange(10): + self.logInfo('Attempt to detect direct link #%d' % i) + + # Wait + if 'You have reached the download-limit!!!' 
in self.html: + self.setWait(3600, True) + else: + m = re.search(self.WAIT_PATTERN, self.html).groupdict('0') + waittime = int(m['min']) * 60 + int(m['sec']) + self.setWait(waittime) + self.wait() + + self.html = self.load(self.pyfile.url, post = inputs) + if 'Click here to download' in self.html: + m = re.search(self.DIRECT_LINK_PATTERN, self.html) + return m.group(1) + + self.parseError('No direct link within 10 retries') + +getInfo = create_getInfo(RyushareCom) diff --git a/pyload/plugins/hoster/SecureUploadEu.py b/pyload/plugins/hoster/SecureUploadEu.py new file mode 100644 index 000000000..b9a900d96 --- /dev/null +++ b/pyload/plugins/hoster/SecureUploadEu.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +from module.plugins.hoster.XFileSharingPro import XFileSharingPro, create_getInfo + + +class SecureUploadEu(XFileSharingPro): + __name__ = "SecureUploadEu" + __type__ = "hoster" + __pattern__ = r"http://(www\.)?secureupload\.eu/(\w){12}(/\w+)" + __version__ = "0.01" + __description__ = """SecureUpload.eu hoster plugin""" + __author_name__ = ("z00nx") + __author_mail__ = ("z00nx0@gmail.com") + + HOSTER_NAME = "secureupload.eu" + FILE_INFO_PATTERN = '
<h3>Downloading (?P<N>[^<]+) \((?P<S>[^<]+)\)</h3>
    ' + FILE_OFFLINE_PATTERN = 'The file was removed|File Not Found' + +getInfo = create_getInfo(SecureUploadEu) diff --git a/pyload/plugins/hoster/SendmywayCom.py b/pyload/plugins/hoster/SendmywayCom.py new file mode 100644 index 000000000..fcbac850a --- /dev/null +++ b/pyload/plugins/hoster/SendmywayCom.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +from module.plugins.hoster.XFileSharingPro import XFileSharingPro, create_getInfo + + +class SendmywayCom(XFileSharingPro): + __name__ = "SendmywayCom" + __type__ = "hoster" + __pattern__ = r"http://(?:\w*\.)*?sendmyway.com/\w{12}" + __version__ = "0.01" + __description__ = """SendMyWay hoster plugin""" + __author_name__ = ("zoidberg") + __author_mail__ = ("zoidberg@mujmail.cz") + + FILE_NAME_PATTERN = r'
<p class="file-name" >
    <.*?>\s*(?P.+)' + FILE_SIZE_PATTERN = r'\((?P\d+) bytes\)' + HOSTER_NAME = "sendmyway.com" + +getInfo = create_getInfo(SendmywayCom) diff --git a/pyload/plugins/hoster/SendspaceCom.py b/pyload/plugins/hoster/SendspaceCom.py new file mode 100644 index 000000000..22abaff56 --- /dev/null +++ b/pyload/plugins/hoster/SendspaceCom.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- +""" + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3 of the License, + or (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, see . + + @author: zoidberg +""" + +import re +from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo + +class SendspaceCom(SimpleHoster): + __name__ = "SendspaceCom" + __type__ = "hoster" + __pattern__ = r"http://(www\.)?sendspace.com/file/.*" + __version__ = "0.13" + __description__ = """sendspace.com plugin - free only""" + __author_name__ = ("zoidberg") + + DOWNLOAD_URL_PATTERN = r'\s*<(?:b|strong)>(?P[^<]+)\s*File Size:\s*(?P[0-9.]+)(?P[kKMG])i?B\s*
</div>' + FILE_OFFLINE_PATTERN = r'<div class="msg error" style="cursor: default">Sorry, the file you requested is not available.</div>
    ' + CAPTCHA_PATTERN = r'' + USER_CAPTCHA_PATTERN = r'' + + def handleFree(self): + params = {} + for i in range(3): + found = re.search(self.DOWNLOAD_URL_PATTERN, self.html) + if found: + if params.has_key('captcha_hash'): self.correctCaptcha() + download_url = found.group(1) + break + + found = re.search(self.CAPTCHA_PATTERN, self.html) + if found: + if params.has_key('captcha_hash'): self.invalidCaptcha() + captcha_url1 = "http://www.sendspace.com/" + found.group(1) + found = re.search(self.USER_CAPTCHA_PATTERN, self.html) + captcha_url2 = "http://www.sendspace.com/" + found.group(1) + params = {'captcha_hash' : found.group(2), + 'captcha_submit': 'Verify', + 'captcha_answer': self.decryptCaptcha(captcha_url1) + " " + self.decryptCaptcha(captcha_url2) + } + else: + params = {'download': "Regular Download"} + + self.logDebug(params) + self.html = self.load(self.pyfile.url, post = params) + else: + self.fail("Download link not found") + + self.logDebug("Download URL: %s" % download_url) + self.download(download_url) + +create_getInfo(SendspaceCom) \ No newline at end of file diff --git a/pyload/plugins/hoster/Share4webCom.py b/pyload/plugins/hoster/Share4webCom.py new file mode 100644 index 000000000..ef9c2acf8 --- /dev/null +++ b/pyload/plugins/hoster/Share4webCom.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + +from module.plugins.hoster.UnibytesCom import UnibytesCom +from module.plugins.internal.SimpleHoster import create_getInfo + +class Share4webCom(UnibytesCom): + __name__ = "Share4webCom" + __type__ = "hoster" + __pattern__ = r"http://(www\.)?share4web\.com/get/\w+" + __version__ = "0.1" + __description__ = """Share4web.com""" + __author_name__ = ("zoidberg") + + DOMAIN = 'http://www.share4web.com' + +getInfo = create_getInfo(UnibytesCom) \ No newline at end of file diff --git a/pyload/plugins/hoster/Share76Com.py b/pyload/plugins/hoster/Share76Com.py new file mode 100644 index 000000000..db850cb73 --- /dev/null +++ b/pyload/plugins/hoster/Share76Com.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +from module.plugins.hoster.XFileSharingPro import XFileSharingPro, create_getInfo + +class Share76Com(XFileSharingPro): + __name__ = "Share76Com" + __type__ = "hoster" + __pattern__ = r"http://(?:\w*\.)*?share76.com/\w{12}" + __version__ = "0.03" + __description__ = """share76.com hoster plugin""" + __author_name__ = ("me") + + FILE_INFO_PATTERN = r'
<h2>\s*File:\s*<font[^>]*>(?P<N>[^>]+)</font>\s*\[<span[^>]*>(?P<S>[0-9.]+) (?P<U>[kKMG])i?B</span>\]</h2>
    ' + HOSTER_NAME = "share76.com" + + def setup(self): + self.resumeDownload = self.multiDL = self.premium + self.chunkLimit = 1 + +getInfo = create_getInfo(Share76Com) diff --git a/pyload/plugins/hoster/ShareFilesCo.py b/pyload/plugins/hoster/ShareFilesCo.py new file mode 100644 index 000000000..ee44b0a1f --- /dev/null +++ b/pyload/plugins/hoster/ShareFilesCo.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +from module.plugins.hoster.XFileSharingPro import XFileSharingPro, create_getInfo +import re + +class ShareFilesCo(XFileSharingPro): + __name__ = "ShareFilesCo" + __type__ = "hoster" + __pattern__ = r"http://(www\.)?sharefiles\.co/\w{12}" + __version__ = "0.01" + __description__ = """Sharefiles.co hoster plugin""" + __author_name__ = ("stickell") + __author_mail__ = ("l.stickell@yahoo.it") + + HOSTER_NAME = "sharefiles.co" + + def startDownload(self, link): + link = link.strip() + if link.startswith('http://adf.ly'): + link = re.sub('http://adf.ly/\d+/', '', link) + if self.captcha: self.correctCaptcha() + self.logDebug('DIRECT LINK: %s' % link) + self.download(link) + +getInfo = create_getInfo(ShareFilesCo) diff --git a/pyload/plugins/hoster/ShareRapidCom.py b/pyload/plugins/hoster/ShareRapidCom.py new file mode 100644 index 000000000..5a08fed1f --- /dev/null +++ b/pyload/plugins/hoster/ShareRapidCom.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import re +from pycurl import HTTPHEADER +from module.network.RequestFactory import getRequest, getURL +from module.network.HTTPRequest import BadHeader +from module.plugins.internal.SimpleHoster import SimpleHoster, parseFileInfo +from module.common.json_layer import json_loads + +def checkFile(url): + response = getURL("http://share-rapid.com/checkfiles.php", post = {"files": url}, decode = True) + info = json_loads(response) + + if "error" in info: + if info['error'] == False: + info['name'] = info['filename'] + info['status'] = 2 + elif info['msg'] == "Not found": + info['status'] = 1 #offline + elif info['msg'] == "Service Unavailable": + info['status'] = 6 #temp.offline + + return info + +def getInfo(urls): + for url in urls: + info = checkFile(url) + if "filename" in info: + yield info['name'], info['size'], info['status'], url + else: + file_info = (url, 0, 3, url) + h = getRequest() + try: + h.c.setopt(HTTPHEADER, ["Accept: text/html"]) + html = h.load(url, cookies = True, decode = True) + file_info = parseFileInfo(ShareRapidCom, url, html) + finally: + h.close() + yield file_info + +class ShareRapidCom(SimpleHoster): + __name__ = "ShareRapidCom" + __type__ = "hoster" + __pattern__ = r"http://(?:www\.)?((share(-?rapid\.(biz|com|cz|info|eu|net|org|pl|sk)|-(central|credit|free|net)\.cz|-ms\.net)|(s-?rapid|rapids)\.(cz|sk))|(e-stahuj|mediatack|premium-rapidshare|rapidshare-premium|qiuck)\.cz|kadzet\.com|stahuj-zdarma\.eu|strelci\.net|universal-share\.com)/stahuj/(\w+)" + __version__ = "0.52" + __description__ = """Share-rapid.com plugin - premium only""" + __author_name__ = ("MikyWoW", "zoidberg") + __author_mail__ = ("MikyWoW@seznam.cz", "zoidberg@mujmail.cz") + + FILE_NAME_PATTERN = r']*>]*>(?:]*>)?(?P[^<]+)' + FILE_SIZE_PATTERN = r'Velikost:\s*\s*(?P[0-9.]+) (?P[kKMG])i?B' + FILE_OFFLINE_PATTERN = ur'Nastala chyba 404|Soubor byl smazán' + + DOWNLOAD_URL_PATTERN = r'
<a href="([^"]+)" title="Stahnout">([^<]+)</a>' + ERR_LOGIN_PATTERN = ur'<div class="error_div"><strong>Stahování je přístupné pouze přihlášeným uživatelům' + ERR_CREDIT_PATTERN = ur'<div class="error_div"><strong>
    Stahování zdarma je možné jen přes náš' + + FILE_URL_REPLACEMENTS = [(__pattern__, r'http://share-rapid.com/stahuj/\1')] + + def setup(self): + self.chunkLimit = 1 + self.resumeDownload = True + + def process(self, pyfile): + if not self.account: self.fail("User not logged in") + + self.info = checkFile(pyfile.url) + self.logDebug(self.info) + + pyfile.status = self.info['status'] + + if pyfile.status == 2: + pyfile.name = self.info['name'] + pyfile.size = self.info['size'] + elif pyfile.status == 1: + self.offline() + elif pyfile.status == 6: + self.tempOffline() + else: + self.fail("Unexpected file status") + + url = "http://share-rapid.com/stahuj/%s" % self.info['filepath'] + try: + self.html = self.load(url, decode=True) + except BadHeader, e: + self.account.relogin(self.user) + self.retry(3, 0, str(e)) + + found = re.search(self.DOWNLOAD_URL_PATTERN, self.html) + if found is not None: + link = found.group(1) + self.logDebug("Premium link: %s" % link) + + self.check_data = {"size": pyfile.size} + self.download(link) + else: + if re.search(self.ERR_LOGIN_PATTERN, self.html): + self.relogin(self.user) + self.retry(3,0,"User login failed") + elif re.search(self.ERR_CREDIT_PATTERN, self.html): + self.fail("Not enough credit left") + else: + self.fail("Download link not found") diff --git a/pyload/plugins/hoster/SharebeesCom.py b/pyload/plugins/hoster/SharebeesCom.py new file mode 100644 index 000000000..f5bacc5b0 --- /dev/null +++ b/pyload/plugins/hoster/SharebeesCom.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +from module.plugins.hoster.XFileSharingPro import XFileSharingPro, create_getInfo + + +class SharebeesCom(XFileSharingPro): + __name__ = "SharebeesCom" + __type__ = "hoster" + __pattern__ = r"http://(?:\w*\.)*?sharebees.com/\w{12}" + __version__ = "0.01" + __description__ = """ShareBees hoster plugin""" + __author_name__ = ("zoidberg") + __author_mail__ = ("zoidberg@mujmail.cz") + + FILE_NAME_PATTERN = r'
<p class="file-name" >
    <.*?>\s*(?P.+)' + FILE_SIZE_PATTERN = r'\((?P\d+) bytes\)' + FORM_PATTERN = 'F1' + HOSTER_NAME = "sharebees.com" + +getInfo = create_getInfo(SharebeesCom) diff --git a/pyload/plugins/hoster/ShareonlineBiz.py b/pyload/plugins/hoster/ShareonlineBiz.py new file mode 100644 index 000000000..e1867168b --- /dev/null +++ b/pyload/plugins/hoster/ShareonlineBiz.py @@ -0,0 +1,203 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import re +from base64 import b64decode +import hashlib +import random +from time import time, sleep + +from module.plugins.Hoster import Hoster +from module.network.RequestFactory import getURL +from module.plugins.Plugin import chunks +from module.plugins.ReCaptcha import ReCaptcha as _ReCaptcha + +def getInfo(urls): + api_url_base = "http://api.share-online.biz/linkcheck.php" + + for chunk in chunks(urls, 90): + api_param_file = {"links": "\n".join(x.replace("http://www.share-online.biz/dl/","").rstrip("/") for x in chunk)} #api only supports old style links + src = getURL(api_url_base, post=api_param_file, decode=True) + result = [] + for i, res in enumerate(src.split("\n")): + if not res: + continue + fields = res.split(";") + + if fields[1] == "OK": + status = 2 + elif fields[1] in ("DELETED", "NOT FOUND"): + status = 1 + else: + status = 3 + + result.append((fields[2], int(fields[3]), status, chunk[i])) + yield result + +#suppress ocr plugin +class ReCaptcha(_ReCaptcha): + def result(self, server, challenge): + return self.plugin.decryptCaptcha("%simage"%server, get={"c":challenge}, cookies=True, forceUser=True, imgtype="jpg") + +class ShareonlineBiz(Hoster): + __name__ = "ShareonlineBiz" + __type__ = "hoster" + __pattern__ = r"http://[\w\.]*?(share\-online\.biz|egoshare\.com)/(download.php\?id\=|dl/)[\w]+" + __version__ = "0.36" + __description__ = """Shareonline.biz Download Hoster""" + __author_name__ = ("spoob", "mkaay", "zoidberg") + __author_mail__ = ("spoob@pyload.org", "mkaay@mkaay.de", "zoidberg@mujmail.cz") + + ERROR_INFO_PATTERN = r'
<h1>Information:</h1>\s*<div[^>]*>
    \s*(.*?)' + + def setup(self): + # range request not working? + # api supports resume, only one chunk + # website isn't supporting resuming in first place + self.file_id = re.search(r"(id\=|/dl/)([a-zA-Z0-9]+)", self.pyfile.url).group(2) + self.pyfile.url = "http://www.share-online.biz/dl/" + self.file_id + + self.resumeDownload = self.premium + self.multiDL = False + #self.chunkLimit = 1 + + self.check_data = None + + def process(self, pyfile): + if self.premium: + self.handleAPIPremium() + #web-download fallback removed - didn't work anyway + else: + self.handleFree() + + """ + check = self.checkDownload({"failure": re.compile(self.ERROR_INFO_PATTERN)}) + if check == "failure": + try: + self.retry(reason = self.lastCheck.group(1).decode("utf8")) + except: + self.retry(reason = "Unknown error") + """ + + if self.api_data: + self.check_data = {"size": int(self.api_data['size']), "md5": self.api_data['md5']} + + def downloadAPIData(self): + api_url_base = "http://api.share-online.biz/linkcheck.php?md5=1" + api_param_file = {"links": self.pyfile.url.replace("http://www.share-online.biz/dl/","")} #api only supports old style links + src = self.load(api_url_base, cookies=False, post=api_param_file, decode=True) + + fields = src.split(";") + self.api_data = {"fileid": fields[0], + "status": fields[1]} + if not self.api_data["status"] == "OK": + self.offline() + self.api_data["filename"] = fields[2] + self.api_data["size"] = fields[3] # in bytes + self.api_data["md5"] = fields[4].strip().lower().replace("\n\n", "") # md5 + + def handleFree(self): + self.downloadAPIData() + self.pyfile.name = self.api_data["filename"] + self.pyfile.size = int(self.api_data["size"]) + + self.html = self.load(self.pyfile.url, cookies = True) #refer, stuff + self.setWait(3) + self.wait() + + self.html = self.load("%s/free/" % self.pyfile.url, post={"dl_free":"1", "choice": "free"}, decode = True) + self.checkErrors() + + found = re.search(r'var wait=(\d+);', self.html) + + recaptcha = ReCaptcha(self) + for i in range(5): + challenge, response = recaptcha.challenge("6LdatrsSAAAAAHZrB70txiV5p-8Iv8BtVxlTtjKX") + self.setWait(int(found.group(1)) if found else 30) + response = self.load("%s/free/captcha/%d" % (self.pyfile.url, int(time() * 1000)), post = { + 'dl_free': '1', + 'recaptcha_challenge_field': challenge, + 'recaptcha_response_field': response}) + + if not response == '0': + break + + else: self.fail("No valid captcha solution received") + + download_url = response.decode("base64") + self.logDebug(download_url) + if not download_url.startswith("http://"): + self.parseError("download url") + + self.wait() + self.download(download_url) + # check download + check = self.checkDownload({ + "cookie": re.compile(r'
    Share-Online") + }) + if check == "cookie": + self.retry(5, 60, "Cookie failure") + elif check == "fail": + self.retry(5, 300, "Download failed") + + def checkErrors(self): + found = re.search(r"/failure/(.*?)/1", self.req.lastEffectiveURL) + if found: + err = found.group(1) + found = re.search(self.ERROR_INFO_PATTERN, self.html) + msg = found.group(1) if found else "" + self.logError(err, msg or "Unknown error occurred") + + if err in ('freelimit', 'size', 'proxy'): + self.fail(msg or "Premium account needed") + if err in ('invalid'): + self.fail(msg or "File not available") + elif err in ('server'): + self.setWait(600, False) + elif err in ('expired'): + self.setWait(30, False) + else: + self.setWait(300, True) + + self.wait() + self.retry(max_tries=25, reason = msg) + + def handleAPIPremium(self): #should be working better + self.account.getAccountInfo(self.user, True) + src = self.load("http://api.share-online.biz/account.php", + {"username": self.user, "password": self.account.accounts[self.user]["password"], "act": "download", "lid": self.file_id}) + + self.api_data = dlinfo = {} + for line in src.splitlines(): + key, value = line.split(": ") + dlinfo[key.lower()] = value + + self.logDebug(dlinfo) + if not dlinfo["status"] == "online": + self.offline() + + self.pyfile.name = dlinfo["name"] + self.pyfile.size = int(dlinfo["size"]) + + dlLink = dlinfo["url"] + if dlLink == "server_under_maintenance": + self.tempoffline() + else: + self.multiDL = True + self.download(dlLink) + + def checksum(self, local_file): + if self.api_data and "md5" in self.api_data and self.api_data["md5"]: + h = hashlib.md5() + f = open(local_file, "rb") + h.update(f.read()) + f.close() + hexd = h.hexdigest() + if hexd == self.api_data["md5"]: + return True, 0 + else: + return False, 1 + else: + self.logWarning("MD5 checksum missing") + return True, 5 diff --git a/pyload/plugins/hoster/ShareplaceCom.py b/pyload/plugins/hoster/ShareplaceCom.py new file mode 100644 index 000000000..1d4441925 --- /dev/null +++ b/pyload/plugins/hoster/ShareplaceCom.py @@ -0,0 +1,84 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import re +import urllib +from module.plugins.Hoster import Hoster + +class ShareplaceCom(Hoster): + __name__ = "ShareplaceCom" + __type__ = "hoster" + __pattern__ = r"(http://)?(www\.)?shareplace\.(com|org)/\?[a-zA-Z0-9]+" + __version__ = "0.1" + __description__ = """Shareplace.com Download Hoster""" + __author_name__ = ("ACCakut, based on YourfilesTo by jeix and skydancer") + __author_mail__ = ("none") + + def setup(self): + self.html = None + self.multiDL = True + + def process(self,pyfile): + self.pyfile = pyfile + self.prepare() + self.download(self.get_file_url()) + + def prepare(self): + if not self.file_exists(): + self.offline() + + self.pyfile.name = self.get_file_name() + + wait_time = self.get_waiting_time() + self.setWait(wait_time) + self.log.debug("%s: Waiting %d seconds." 
% (self.__name__,wait_time)) + self.wait() + + def get_waiting_time(self): + if self.html is None: + self.download_html() + + #var zzipitime = 15; + m = re.search(r'var zzipitime = (\d+);', self.html) + if m: + sec = int(m.group(1)) + else: + sec = 0 + + return sec + + def download_html(self): + url = re.sub("shareplace.com\/\?", "shareplace.com//index1.php/?a=", self.pyfile.url) + self.html = self.load(url, decode=True) + + def get_file_url(self): + """ returns the absolute downloadable filepath + """ + url = re.search(r"var beer = '(.*?)';", self.html) + if url: + url = url.group(1) + url = urllib.unquote(url.replace("http://http:/", "").replace("vvvvvvvvv", "").replace("lllllllll", "").replace("teletubbies", "")) + self.logDebug("URL: %s" % url) + return url + else: + self.fail("absolute filepath could not be found. offline? ") + + def get_file_name(self): + if self.html is None: + self.download_html() + + return re.search("\s*(.*?)\s*", self.html).group(1) + + def file_exists(self): + """ returns True or False + """ + if self.html is None: + self.download_html() + + if re.search(r"HTTP Status 404", self.html) is not None: + return False + else: + return True + + + diff --git a/pyload/plugins/hoster/ShragleCom.py b/pyload/plugins/hoster/ShragleCom.py new file mode 100644 index 000000000..99f9f2366 --- /dev/null +++ b/pyload/plugins/hoster/ShragleCom.py @@ -0,0 +1,106 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import re +from pycurl import FOLLOWLOCATION + +from module.plugins.Hoster import Hoster +from module.plugins.internal.SimpleHoster import parseHtmlForm +from module.plugins.ReCaptcha import ReCaptcha +from module.network.RequestFactory import getURL + +API_KEY = "078e5ca290d728fd874121030efb4a0d" + +def parseFileInfo(self, url): + file_id = re.match(self.__pattern__, url).group('ID') + + data = getURL( + "http://www.cloudnator.com/api.php?key=%s&action=getStatus&fileID=%s" % (API_KEY, file_id), + decode = True + ).split() + + if len(data) == 4: + name, size, md5, status = data + size = int(size) + + if hasattr(self, "check_data"): + self.checkdata = {"size": size, "md5": md5} + + return name, size, 2 if status == "0" else 1, url + else: + return url, 0, 1, url + +def getInfo(urls): + for url in urls: + file_info = parseFileInfo(ShragleCom, url) + yield file_info + +class ShragleCom(Hoster): + __name__ = "ShragleCom" + __type__ = "hoster" + __pattern__ = r"http://(?:www.)?(cloudnator|shragle).com/files/(?P.*?)/" + __version__ = "0.21" + __description__ = """Cloudnator.com (Shragle.com) Download PLugin""" + __author_name__ = ("RaNaN", "zoidberg") + __author_mail__ = ("RaNaN@pyload.org", "zoidberg@mujmail.cz") + + def setup(self): + self.html = None + self.multiDL = False + self.check_data = None + + def process(self, pyfile): + #get file status and info + self.pyfile.name, self.pyfile.size, status = parseFileInfo(self, pyfile.url)[:3] + if status != 2: + self.offline() + + self.handleFree() + + def handleFree(self): + self.html = self.load(self.pyfile.url) + + #get wait time + found = re.search('\s*var\sdownloadWait\s=\s(\d+);', self.html) + self.setWait(int(found.group(1)) if found else 30) + + #parse download form + action, inputs = parseHtmlForm('id="download', self.html) + + #solve captcha + found = re.search('recaptcha/api/(?:challenge|noscript)?k=(.+?)', self.html) + captcha_key = found.group(1) if found else "6LdEFb0SAAAAAAwM70vnYo2AkiVkCx-xmfniatHz" + + recaptcha = ReCaptcha(self) + + inputs['recaptcha_challenge_field'], inputs['recaptcha_response_field'] = 
recaptcha.challenge(captcha_key) + self.wait() + + #validate + self.req.http.c.setopt(FOLLOWLOCATION, 0) + self.html = self.load(action, post = inputs) + + found = re.search(r"Location\s*:\s*(\S*)", self.req.http.header, re.I) + if found: + self.correctCaptcha() + download_url = found.group(1) + else: + if "Sicherheitscode falsch" in self.html: + self.invalidCaptcha() + self.retry(max_tries = 5, reason = "Invalid captcha") + else: + self.fail("Invalid session") + + #download + self.req.http.c.setopt(FOLLOWLOCATION, 1) + self.download(download_url) + + check = self.checkDownload({ + "ip_blocked": re.compile(r'
    \w+)" + __version__ = "1.01" + __description__ = """Speedload.org hoster plugin""" + __author_name__ = ("stickell") + __author_mail__ = ("l.stickell@yahoo.it") + + FILE_NAME_PATTERN = r'Filename:(?P[^<]+)' + FILE_SIZE_PATTERN = r'Size:[\w. ]+\((?P\d+) bytes\)' + + HOSTER_NAME = "speedload.org" + + def handlePremium(self): + self.download(self.pyfile.url, post = self.getPostParameters()) + +getInfo = create_getInfo(SpeedLoadOrg) diff --git a/pyload/plugins/hoster/SpeedfileCz.py b/pyload/plugins/hoster/SpeedfileCz.py new file mode 100644 index 000000000..bfd316dfa --- /dev/null +++ b/pyload/plugins/hoster/SpeedfileCz.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +""" + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3 of the License, + or (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, see . + + @author: zoidberg +""" + +import re +from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo + +class SpeedfileCz(SimpleHoster): + __name__ = "SpeedFileCz" + __type__ = "hoster" + __pattern__ = r"http://speedfile.cz/.*" + __version__ = "0.31" + __description__ = """speedfile.cz""" + __author_name__ = ("zoidberg") + + FILE_NAME_PATTERN = r'' + FILE_SIZE_PATTERN = r'(?P[0-9.]+) (?P[kKMG])i?B' + URL_PATTERN = r'' + FILE_OFFLINE_PATTERN = r'Speedfile \| 404' + WAIT_PATTERN = r'"requestedAt":(\d+),"allowedAt":(\d+),"adUri"' + + def setup(self): + self.multiDL = False + + def process(self, pyfile): + self.html = self.load(pyfile.url, decode=True) + + if re.search(self.FILE_OFFLINE_PATTERN, self.html): + self.offline() + + found = re.search(self.FILE_NAME_PATTERN, self.html) + if found is None: + self.fail("Parse error (NAME)") + pyfile.name = found.group(1) + + found = re.search(self.URL_PATTERN, self.html) + if found is None: + self.fail("Parse error (URL)") + download_url = "http://speedfile.cz/" + found.group(1) + + self.html = self.load(download_url) + self.logDebug(self.html) + found = re.search(self.WAIT_PATTERN, self.html) + if found is None: + self.fail("Parse error (WAIT)") + self.setWait(int(found.group(2)) - int(found.group(1))) + self.wait() + + self.download(download_url) + +create_getInfo(SpeedfileCz) \ No newline at end of file diff --git a/pyload/plugins/hoster/StreamCz.py b/pyload/plugins/hoster/StreamCz.py new file mode 100644 index 000000000..ca1033502 --- /dev/null +++ b/pyload/plugins/hoster/StreamCz.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +""" + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3 of the License, + or (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, see <http://www.gnu.org/licenses/>. 
+ + @author: zoidberg +""" + +import re +from module.plugins.Hoster import Hoster +from module.network.RequestFactory import getURL + +def getInfo(urls): + result = [] + + for url in urls: + + html = getURL(url) + if re.search(StreamCz.FILE_OFFLINE_PATTERN, html): + # File offline + result.append((url, 0, 1, url)) + else: + result.append((url, 0, 2, url)) + yield result + +class StreamCz(Hoster): + __name__ = "StreamCz" + __type__ = "hoster" + __pattern__ = r"http://www.stream.cz/[^/]+/\d+.*" + __version__ = "0.1" + __description__ = """stream.cz""" + __author_name__ = ("zoidberg") + + FILE_OFFLINE_PATTERN = r'<h1 class="commonTitle">Str.nku nebylo mo.n. nal.zt \(404\)</h1>' + FILE_NAME_PATTERN = r'<link rel="video_src" href="http://www.stream.cz/\w+/(\d+)-([^"]+)" />' + CDN_PATTERN = r'<param name="flashvars" value="[^"]*&id=(?P<ID>\d+)(?:&cdnLQ=(?P<cdnLQ>\d*))?(?:&cdnHQ=(?P<cdnHQ>\d*))?(?:&cdnHD=(?P<cdnHD>\d*))?&' + + def setup(self): + self.multiDL = True + self.resumeDownload = True + + def process(self, pyfile): + + self.html = self.load(pyfile.url, decode=True) + + if re.search(self.FILE_OFFLINE_PATTERN, self.html): + self.offline() + + found = re.search(self.CDN_PATTERN, self.html) + if found is None: self.fail("Parse error (CDN)") + cdn = found.groupdict() + self.logDebug(cdn) + for cdnkey in ("cdnHD", "cdnHQ", "cdnLQ"): + if cdn.has_key(cdnkey) and cdn[cdnkey] > '': + cdnid = cdn[cdnkey] + break + else: + self.fail("Stream URL not found") + + found = re.search(self.FILE_NAME_PATTERN, self.html) + if found is None: self.fail("Parse error (NAME)") + pyfile.name = "%s-%s.%s.mp4" % (found.group(2), found.group(1), cdnkey[-2:]) + + download_url = "http://cdn-dispatcher.stream.cz/?id=" + cdnid + self.logInfo("STREAM (%s): %s" % (cdnkey[-2:], download_url)) + self.download(download_url) diff --git a/pyload/plugins/hoster/TurbobitNet.py b/pyload/plugins/hoster/TurbobitNet.py new file mode 100644 index 000000000..b429d5510 --- /dev/null +++ b/pyload/plugins/hoster/TurbobitNet.py @@ -0,0 +1,170 @@ +# -*- coding: utf-8 -*- +""" + Copyright (C) 2012 pyLoad team + Copyright (C) 2012 JD-Team support@jdownloader.org + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3 of the License, + or (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, see <http://www.gnu.org/licenses/>. 
+ + @author: zoidberg +""" + +import re +import random +from urllib import quote +from binascii import hexlify, unhexlify +from Crypto.Cipher import ARC4 +import time + +from module.network.RequestFactory import getURL +from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo, timestamp +from module.plugins.ReCaptcha import ReCaptcha + +from pycurl import HTTPHEADER + +class TurbobitNet(SimpleHoster): + __name__ = "TurbobitNet" + __type__ = "hoster" + __pattern__ = r"http://(?:\w*\.)?(turbobit.net|unextfiles.com)/(?:download/free/)?(?P<ID>\w+).*" + __version__ = "0.08" + __description__ = """Turbobit.net plugin""" + __author_name__ = ("zoidberg") + __author_mail__ = ("zoidberg@mujmail.cz") + + FILE_INFO_PATTERN = r"<span class='file-icon1[^>]*>(?P<N>[^<]+)</span>\s*\((?P<S>[^\)]+)\)\s*</h1>" #long filenames are shortened + FILE_NAME_PATTERN = r'<meta name="keywords" content="\s+(?P<N>[^,]+)' #full name but missing on page2 + FILE_OFFLINE_PATTERN = r'<h2>File Not Found</h2>|html\(\'File was not found' + FILE_URL_REPLACEMENTS = [(r"http://(?:\w*\.)?(turbobit.net|unextfiles.com)/(?:download/free/)?(?P<ID>\w+).*", "http://turbobit.net/\g<ID>.html")] + SH_COOKIES = [("turbobit.net", "user_lang", "en")] + + CAPTCHA_KEY_PATTERN = r'src="http://api\.recaptcha\.net/challenge\?k=([^"]+)"' + DOWNLOAD_URL_PATTERN = r'(?P<url>/download/redirect/[^"\']+)' + LIMIT_WAIT_PATTERN = r'<div id="time-limit-text">\s*.*?<span id=\'timeout\'>(\d+)</span>' + CAPTCHA_SRC_PATTERN = r'<img alt="Captcha" src="(.*?)"' + + def handleFree(self): + self.url = "http://turbobit.net/download/free/%s" % self.file_info['ID'] + self.html = self.load(self.url) + + rtUpdate = self.getRtUpdate() + + self.solveCaptcha() + self.req.http.c.setopt(HTTPHEADER, ["X-Requested-With: XMLHttpRequest"]) + self.url = self.getDownloadUrl(rtUpdate) + + self.wait() + self.html = self.load(self.url) + self.req.http.c.setopt(HTTPHEADER, ["X-Requested-With:"]) + self.downloadFile() + + def solveCaptcha(self): + for i in range(5): + found = re.search(self.LIMIT_WAIT_PATTERN, self.html) + if found: + wait_time = int(found.group(1)) + self.setWait(wait_time, wait_time > 60) + self.wait() + self.retry() + + action, inputs = self.parseHtmlForm("action='#'") + if not inputs: self.parseError("captcha form") + self.logDebug(inputs) + + if inputs['captcha_type'] == 'recaptcha': + recaptcha = ReCaptcha(self) + found = re.search(self.CAPTCHA_KEY_PATTERN, self.html) + captcha_key = found.group(1) if found else '6LcTGLoSAAAAAHCWY9TTIrQfjUlxu6kZlTYP50_c' + inputs['recaptcha_challenge_field'], inputs['recaptcha_response_field'] = recaptcha.challenge(captcha_key) + else: + found = re.search(self.CAPTCHA_SRC_PATTERN, self.html) + if not found: self.parseError('captcha') + captcha_url = found.group(1) + inputs['captcha_response'] = self.decryptCaptcha(captcha_url) + + self.logDebug(inputs) + self.html = self.load(self.url, post = inputs) + + if not "<div class='download-timer-header'>" in self.html: + self.invalidCaptcha() + else: + self.correctCaptcha() + break + else: self.fail("Invalid captcha") + + def getRtUpdate(self): + rtUpdate = self.getStorage("rtUpdate") + if not rtUpdate: + if self.getStorage("version") != self.__version__ or int(self.getStorage("timestamp", 0)) + 86400000 < timestamp(): + # that's right, we are even using jdownloader updates + rtUpdate = getURL("http://update0.jdownloader.org/pluginstuff/tbupdate.js") + rtUpdate = self.decrypt(rtUpdate.splitlines()[1]) + # but we still need to fix the syntax to work with other 
engines than rhino + rtUpdate = re.sub(r'for each\(var (\w+) in(\[[^\]]+\])\)\{',r'zza=\2;for(var zzi=0;zzi<zza.length;zzi++){\1=zza[zzi];',rtUpdate) + rtUpdate = re.sub(r"for\((\w+)=",r"for(var \1=", rtUpdate) + + self.logDebug("rtUpdate") + self.setStorage("rtUpdate", rtUpdate) + self.setStorage("timestamp", timestamp()) + self.setStorage("version", self.__version__) + else: + self.logError("Unable to download, wait for update...") + self.tempOffline() + + return rtUpdate + + def getDownloadUrl(self, rtUpdate): + self.req.http.lastURL = self.url + + found = re.search("(/\w+/timeout\.js\?\w+=)([^\"\'<>]+)", self.html) + url = "http://turbobit.net%s%s" % (found.groups() if found else ('/files/timeout.js?ver=', ''.join(random.choice('0123456789ABCDEF') for x in range(32)))) + fun = self.load(url) + + self.setWait(65, False) + + for b in [1,3]: + self.jscode = "var id = \'%s\';var b = %d;var inn = \'%s\';%sout" % (self.file_info['ID'], b, quote(fun), rtUpdate) + + try: + out = self.js.eval(self.jscode) + self.logDebug("URL", self.js.engine, out) + if out.startswith('/download/'): + return "http://turbobit.net%s" % out.strip() + except Exception, e: + self.logError(e) + else: + if self.retries >= 2: + # retry with updated js + self.delStorage("rtUpdate") + self.retry() + + def decrypt(self, data): + cipher = ARC4.new(hexlify('E\x15\xa1\x9e\xa3M\xa0\xc6\xa0\x84\xb6H\x83\xa8o\xa0')) + return unhexlify(cipher.encrypt(unhexlify(data))) + + def getLocalTimeString(self): + lt = time.localtime() + tz = time.altzone if lt.tm_isdst else time.timezone + return "%s GMT%+03d%02d" % (time.strftime("%a %b %d %Y %H:%M:%S", lt), -tz // 3600, tz % 3600) + + def handlePremium(self): + self.logDebug("Premium download as user %s" % self.user) + self.downloadFile() + + def downloadFile(self): + found = re.search(self.DOWNLOAD_URL_PATTERN, self.html) + if not found: self.parseError("download link") + self.url = "http://turbobit.net" + found.group('url') + self.logDebug(self.url) + self.download(self.url) + +getInfo = create_getInfo(TurbobitNet) diff --git a/pyload/plugins/hoster/TurbouploadCom.py b/pyload/plugins/hoster/TurbouploadCom.py new file mode 100644 index 000000000..6e81c6319 --- /dev/null +++ b/pyload/plugins/hoster/TurbouploadCom.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +""" + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3 of the License, + or (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, see <http://www.gnu.org/licenses/>. 
+ + @author: zoidberg +""" + +import re +from module.plugins.internal.DeadHoster import DeadHoster as EasybytezCom, create_getInfo +#from module.plugins.internal.SimpleHoster import create_getInfo +#from module.plugins.hoster.EasybytezCom import EasybytezCom + +class TurbouploadCom(EasybytezCom): + __name__ = "TurbouploadCom" + __type__ = "hoster" + __pattern__ = r"http://(?:\w*\.)?turboupload.com/(\w+).*" + __version__ = "0.02" + __description__ = """turboupload.com""" + __author_name__ = ("zoidberg") + __author_mail__ = ("zoidberg@mujmail.cz") + + # shares code with EasybytezCom + + DIRECT_LINK_PATTERN = r'<a href="(http://turboupload.com/files/[^"]+)">\1</a>' + + def handleFree(self): + self.html = self.load(self.pyfile.url, post = self.getPostParameters(), ref = True, cookies = True) + found = re.search(self.DIRECT_LINK_PATTERN, self.html) + if not found: self.parseError('Download Link') + url = found.group(1) + self.logDebug('URL: ' + url) + self.download(url) + +getInfo = create_getInfo(TurbouploadCom) \ No newline at end of file diff --git a/pyload/plugins/hoster/TusfilesNet.py b/pyload/plugins/hoster/TusfilesNet.py new file mode 100644 index 000000000..517df8561 --- /dev/null +++ b/pyload/plugins/hoster/TusfilesNet.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +from module.plugins.hoster.XFileSharingPro import XFileSharingPro, create_getInfo + +class TusfilesNet(XFileSharingPro): + __name__ = "TusfilesNet" + __type__ = "hoster" + __pattern__ = r"http://(www\.)?tusfiles\.net/\w{12}" + __version__ = "0.01" + __description__ = """Tusfiles.net hoster plugin""" + __author_name__ = ("stickell") + __author_mail__ = ("l.stickell@yahoo.it") + + FILE_INFO_PATTERN = r'<li>(?P<N>[^<]+)</li>\s+<li><b>Size:</b> <small>(?P<S>[\d.]+) (?P<U>\w+)</small></li>' + FILE_OFFLINE_PATTERN = r'The file you were looking for could not be found' + + HOSTER_NAME = "tusfiles.net" + +getInfo = create_getInfo(TusfilesNet) diff --git a/pyload/plugins/hoster/TwoSharedCom.py b/pyload/plugins/hoster/TwoSharedCom.py new file mode 100644 index 000000000..8401e0cb0 --- /dev/null +++ b/pyload/plugins/hoster/TwoSharedCom.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo +import re + +class TwoSharedCom(SimpleHoster): + __name__ = "TwoSharedCom" + __type__ = "hoster" + __pattern__ = r"http://[\w\.]*?2shared.com/(account/)?(download|get|file|document|photo|video|audio)/.*" + __version__ = "0.11" + __description__ = """2Shared Download Hoster""" + __author_name__ = ("zoidberg") + __author_mail__ = ("zoidberg@mujmail.cz") + + FILE_NAME_PATTERN = r'<h1>(?P<N>.*)</h1>' + FILE_SIZE_PATTERN = r'<span class="dtitle">File size:</span>\s*(?P<S>[0-9,.]+) (?P<U>[kKMG])i?B' + FILE_OFFLINE_PATTERN = r'The file link that you requested is not valid\.|This file was deleted\.' 
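+ # The named groups follow the SimpleHoster convention used throughout this
+ # patch: N = file name, S = size figure, U = unit prefix. A hedged sketch of
+ # how the base class consumes them via parseFileInfo()/parseFileSize():
+ #   m = re.search(FILE_SIZE_PATTERN, '<span class="dtitle">File size:</span> 297,91 KB')
+ #   m.group('S'), m.group('U')  # -> ('297,91', 'K'), later scaled to bytes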
+ DOWNLOAD_URL_PATTERN = r"window.location ='([^']+)';" + + def setup(self): + self.resumeDownload = self.multiDL = True + + def handleFree(self): + found = re.search(self.DOWNLOAD_URL_PATTERN, self.html) + if not found: self.parseError('Download link') + link = found.group(1) + self.logDebug("Download URL %s" % link) + + self.download(link) + +getInfo = create_getInfo(TwoSharedCom) + \ No newline at end of file diff --git a/pyload/plugins/hoster/UlozTo.py b/pyload/plugins/hoster/UlozTo.py new file mode 100644 index 000000000..5c38fdaad --- /dev/null +++ b/pyload/plugins/hoster/UlozTo.py @@ -0,0 +1,156 @@ +# -*- coding: utf-8 -*- +""" + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3 of the License, + or (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, see <http://www.gnu.org/licenses/>. + + @author: zoidberg +""" + +import re +from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo + +def convertDecimalPrefix(m): + # decimal prefixes used in filesize and traffic + return ("%%.%df" % {'k':3,'M':6,'G':9}[m.group(2)] % float(m.group(1))).replace('.','') + +class UlozTo(SimpleHoster): + __name__ = "UlozTo" + __type__ = "hoster" + __pattern__ = r"http://(\w*\.)?(uloz\.to|ulozto\.(cz|sk|net)|bagruj.cz|zachowajto.pl)/(?:live/)?(?P<id>\w+/[^/?]*)" + __version__ = "0.92" + __description__ = """uloz.to""" + __author_name__ = ("zoidberg") + + FILE_NAME_PATTERN = r'<a href="#download" class="jsShowDownload">(?P<N>[^<]+)</a>' + FILE_SIZE_PATTERN = r'<span id="fileSize">.*?(?P<S>[0-9.]+\s[kMG]?B)</span>' + FILE_INFO_PATTERN = r'<p>File <strong>(?P<N>[^<]+)</strong> is password protected</p>' + FILE_OFFLINE_PATTERN = r'<title>404 - Page not found|
<h1 class="h1">File (has been deleted|was banned)</h1>
    ' + FILE_SIZE_REPLACEMENTS = [('([0-9.]+)\s([kMG])B', convertDecimalPrefix)] + FILE_URL_REPLACEMENTS = [(r"(?<=http://)([^/]+)", "www.ulozto.net")] + + PASSWD_PATTERN = r'
    ' + VIPLINK_PATTERN = r'' + FREE_URL_PATTERN = r'
    \s*
  • '), + "offline": re.compile(self.FILE_OFFLINE_PATTERN), + "passwd": self.PASSWD_PATTERN, + "server_error": 'src="http://img.ulozto.cz/error403/vykricnik.jpg"', #paralell dl, server overload etc. + "not_found": "Ulož.to" + }) + + if check == "wrong_captcha": + self.delStorage("captcha_id") + self.delStorage("captcha_text") + self.invalidCaptcha() + self.retry(reason="Wrong captcha code") + elif check == "offline": + self.offline() + elif check == "passwd": + self.fail("Wrong password") + elif check == "server_error": + self.logError("Server error, try downloading later") + self.multiDL = False + self.setWait(3600, True) + self.wait() + self.retry() + elif check == "not_found": + self.fail("Server error - file not downloadable") + +getInfo = create_getInfo(UlozTo) diff --git a/pyload/plugins/hoster/UloziskoSk.py b/pyload/plugins/hoster/UloziskoSk.py new file mode 100644 index 000000000..c607e7a5b --- /dev/null +++ b/pyload/plugins/hoster/UloziskoSk.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- +""" + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3 of the License, + or (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, see . + + @author: zoidberg +""" + +import re +from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo, PluginParseError + +class UloziskoSk(SimpleHoster): + __name__ = "UloziskoSk" + __type__ = "hoster" + __pattern__ = r"http://(\w*\.)?ulozisko.sk/.*" + __version__ = "0.23" + __description__ = """Ulozisko.sk""" + __author_name__ = ("zoidberg") + + URL_PATTERN = r'' + ID_PATTERN = r'' + FILE_NAME_PATTERN = r'
(?P<N>[^<]+)' + FILE_SIZE_PATTERN = ur'Veľkosť súboru: (?P<S>[0-9.]+) (?P<U>[kKMG])i?B' + CAPTCHA_PATTERN = r'' + FILE_OFFLINE_PATTERN = ur'Zadaný súbor neexistuje z jedného z nasledujúcich dôvodov:' + IMG_PATTERN = ur'PRE ZVÄČŠENIE KLIKNITE NA OBRÁZOK
    ' + + def process(self, pyfile): + self.html = self.load(pyfile.url, decode=True) + self.getFileInfo() + + found = re.search(self.IMG_PATTERN, self.html) + if found: + url = "http://ulozisko.sk" + found.group(1) + self.download(url) + else: + self.handleFree() + + def handleFree(self): + found = re.search(self.URL_PATTERN, self.html) + if found is None: raise PluginParseError('URL') + parsed_url = 'http://www.ulozisko.sk' + found.group(1) + + found = re.search(self.ID_PATTERN, self.html) + if found is None: raise PluginParseError('ID') + id = found.group(1) + + self.logDebug('URL:' + parsed_url + ' ID:' + id) + + found = re.search(self.CAPTCHA_PATTERN, self.html) + if found is None: raise PluginParseError('CAPTCHA') + captcha_url = 'http://www.ulozisko.sk' + found.group(1) + + captcha = self.decryptCaptcha(captcha_url, cookies=True) + + self.logDebug('CAPTCHA_URL:' + captcha_url + ' CAPTCHA:' + captcha) + + self.download(parsed_url, post={ + "antispam": captcha, + "id": id, + "name": self.pyfile.name, + "but": "++++STIAHNI+S%DABOR++++" + }) + +getInfo = create_getInfo(UloziskoSk) diff --git a/pyload/plugins/hoster/UnibytesCom.py b/pyload/plugins/hoster/UnibytesCom.py new file mode 100644 index 000000000..3c8552271 --- /dev/null +++ b/pyload/plugins/hoster/UnibytesCom.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +""" + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3 of the License, + or (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, see . 
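# A note on the getInfo contract used by the hosters in this patch: each
# getInfo(urls) generator yields lists of (name, size, status, url) tuples,
# with status 1 = offline, 2 = online, 3 = unknown (the codes visible in
# parseFileInfo further below). A minimal compliant stub, purely illustrative:

def getInfo(urls):
    # report every link as "unknown" without touching the network
    yield [(url, 0, 3, url) for url in urls]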
+ + @author: zoidberg +""" + +import re +from pycurl import FOLLOWLOCATION +from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo + +class UnibytesCom(SimpleHoster): + __name__ = "UnibytesCom" + __type__ = "hoster" + __pattern__ = r"http://(www\.)?unibytes\.com/[a-zA-Z0-9-._ ]{11}B" + __version__ = "0.1" + __description__ = """UniBytes.com""" + __author_name__ = ("zoidberg") + + FILE_INFO_PATTERN = r']*?id="fileName"[^>]*>(?P[^>]+)\s*\((?P\d.*?)\)' + DOMAIN = 'http://www.unibytes.com' + + WAIT_PATTERN = r'Wait for (\d+) sec' + DOWNLOAD_LINK_PATTERN = r'Download' + + def handleFree(self): + action, post_data = self.parseHtmlForm('id="startForm"') + self.req.http.c.setopt(FOLLOWLOCATION, 0) + + for i in range(8): + self.logDebug(action, post_data) + self.html = self.load(self.DOMAIN + action, post = post_data) + + found = re.search(r'location:\s*(\S+)', self.req.http.header, re.I) + if found: + url = found.group(1) + break + + if '>Somebody else is already downloading using your IP-address<' in self.html: + self.setWait(600, True) + self.wait() + self.retry() + + if post_data['step'] == 'last': + found = re.search(self.DOWNLOAD_LINK_PATTERN, self.html) + if found: + url = found.group(1) + self.correctCaptcha() + break + else: + self.invalidCaptcha() + + last_step = post_data['step'] + action, post_data = self.parseHtmlForm('id="stepForm"') + + if last_step == 'timer': + found = re.search(self.WAIT_PATTERN, self.html) + self.setWait(int(found.group(1)) if found else 60, False) + self.wait() + elif last_step in ('captcha', 'last'): + post_data['captcha'] = self.decryptCaptcha(self.DOMAIN + '/captcha.jpg') + else: + self.fail("No valid captcha code entered") + + self.logDebug('Download link: ' + url) + self.req.http.c.setopt(FOLLOWLOCATION, 1) + self.download(url) + +getInfo = create_getInfo(UnibytesCom) \ No newline at end of file diff --git a/pyload/plugins/hoster/UploadStationCom.py b/pyload/plugins/hoster/UploadStationCom.py new file mode 100644 index 000000000..96dc7b577 --- /dev/null +++ b/pyload/plugins/hoster/UploadStationCom.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- +from module.plugins.hoster.FileserveCom import FileserveCom, checkFile +from module.plugins.Plugin import chunks + +class UploadStationCom(FileserveCom): + __name__ = "UploadStationCom" + __type__ = "hoster" + __pattern__ = r"http://(?:www\.)?uploadstation\.com/file/(?P[A-Za-z0-9]+)" + __version__ = "0.51" + __description__ = """UploadStation.Com File Download Hoster""" + __author_name__ = ("fragonib", "zoidberg") + __author_mail__ = ("fragonib[AT]yahoo[DOT]es", "zoidberg@mujmail.cz") + + URLS = ['http://www.uploadstation.com/file/', 'http://www.uploadstation.com/check-links.php', 'http://www.uploadstation.com/checkReCaptcha.php'] + LINKCHECK_TR = r'
(.*?)\t{9}' + LINKCHECK_TD = r'(?:<[^>]*>| )*([^<]*)' + + LONG_WAIT_PATTERN = r'You have to wait (\d+) (\w+) to download the next file\.
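# UploadStationCom's getInfo just below walks the urls in chunks(urls, 100);
# chunks() (imported above from module.plugins.Plugin) simply slices a list
# into batches. A rough, illustrative equivalent:

def chunks(lst, n):
    # yield successive slices of at most n items
    for i in range(0, len(lst), n):
        yield lst[i:i + n]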
    ' + +def getInfo(urls): + for chunk in chunks(urls, 100): yield checkFile(UploadStationCom, chunk) \ No newline at end of file diff --git a/pyload/plugins/hoster/UploadedTo.py b/pyload/plugins/hoster/UploadedTo.py new file mode 100644 index 000000000..5aa08891e --- /dev/null +++ b/pyload/plugins/hoster/UploadedTo.py @@ -0,0 +1,238 @@ +# -*- coding: utf-8 -*- + +# Test links (random.bin): +# http://ul.to/044yug9o +# http://ul.to/gzfhd0xs + +import re +from time import sleep + +from module.utils import html_unescape, parseFileSize, chunks + +from module.plugins.Hoster import Hoster +from module.network.RequestFactory import getURL +from module.plugins.ReCaptcha import ReCaptcha + +key = "bGhGMkllZXByd2VEZnU5Y2NXbHhYVlZ5cEE1bkEzRUw=".decode('base64') + + +def getID(url): + """ returns id from file url""" + m = re.match(UploadedTo.__pattern__, url) + return m.group('ID') + + +def getAPIData(urls): + post = {"apikey": key} + + idMap = {} + + for i, url in enumerate(urls): + id = getID(url) + post["id_%s" % i] = id + idMap[id] = url + + for i in xrange(5): + api = unicode(getURL("http://uploaded.net/api/filemultiple", post=post, decode=False), 'iso-8859-1') + if api != "can't find request": + break + else: + sleep(3) + + result = {} + + if api: + for line in api.splitlines(): + data = line.split(",", 4) + if data[1] in idMap: + result[data[1]] = (data[0], data[2], data[4], data[3], idMap[data[1]]) + + return result + + +def parseFileInfo(self, url='', html=''): + if not html and hasattr(self, "html"): html = self.html + name, size, status, found, fileid = url, 0, 3, None, None + + if re.search(self.FILE_OFFLINE_PATTERN, html): + # File offline + status = 1 + else: + found = re.search(self.FILE_INFO_PATTERN, html) + if found: + name, fileid = html_unescape(found.group('N')), found.group('ID') + size = parseFileSize(found.group('S')) + status = 2 + + return name, size, status, fileid + + +def getInfo(urls): + for chunk in chunks(urls, 80): + result = [] + + api = getAPIData(chunk) + + for data in api.itervalues(): + if data[0] == "online": + result.append((html_unescape(data[2]), data[1], 2, data[4])) + + elif data[0] == "offline": + result.append((data[4], 0, 1, data[4])) + + yield result + + +class UploadedTo(Hoster): + __name__ = "UploadedTo" + __type__ = "hoster" + __pattern__ = r"https?://[\w\.-]*?(uploaded\.(to|net)|ul\.to)(/file/|/?\?id=|.*?&id=|/)(?P\w+)" + __version__ = "0.70" + __description__ = """Uploaded.net Download Hoster""" + __author_name__ = ("spoob", "mkaay", "zoidberg", "netpok", "stickell") + __author_mail__ = ("spoob@pyload.org", "mkaay@mkaay.de", "zoidberg@mujmail.cz", "netpok@gmail.com", "l.stickell@yahoo.it") + + FILE_INFO_PATTERN = r'(?P[^<]+)  \s*]*>(?P[^<]+)' + FILE_OFFLINE_PATTERN = r'Error: 404' + DL_LIMIT_PATTERN = "You have reached the max. 
number of possible free downloads for this hour" + + def setup(self): + self.html = None + self.multiDL = False + self.resumeDownload = False + self.url = False + self.chunkLimit = 1 # critical problems with more chunks + if self.account: + self.premium = self.account.getAccountInfo(self.user)["premium"] + if self.premium: + self.multiDL = True + self.resumeDownload = True + + self.fileID = getID(self.pyfile.url) + self.pyfile.url = "http://uploaded.net/file/%s" % self.fileID + + def process(self, pyfile): + self.load("http://uploaded.net/language/en", just_header=True) + + api = getAPIData([pyfile.url]) + + # TODO: fallback to parse from site, because api sometimes delivers wrong status codes + + if not api: + self.logWarning("No response for API call") + + self.html = unicode(self.load(pyfile.url, decode=False), 'iso-8859-1') + name, size, status, self.fileID = parseFileInfo(self) + self.logDebug(name, size, status, self.fileID) + if status == 1: + self.offline() + elif status == 2: + pyfile.name, pyfile.size = name, size + else: + self.fail('Parse error - file info') + elif api == 'Access denied': + self.fail(_("API key invalid")) + + else: + if self.fileID not in api: + self.offline() + + self.data = api[self.fileID] + if self.data[0] != "online": + self.offline() + + pyfile.name = html_unescape(self.data[2]) + + # self.pyfile.name = self.get_file_name() + + if self.premium: + self.handlePremium() + else: + self.handleFree() + + + def handlePremium(self): + info = self.account.getAccountInfo(self.user, True) + self.log.debug("%(name)s: Use Premium Account (%(left)sGB left)" % {"name": self.__name__, + "left": info["trafficleft"] / 1024 / 1024}) + if int(self.data[1]) / 1024 > info["trafficleft"]: + self.log.info(_("%s: Not enough traffic left" % self.__name__)) + self.account.empty(self.user) + self.resetAccount() + self.fail(_("Traffic exceeded")) + + header = self.load("http://uploaded.net/file/%s" % self.fileID, just_header=True) + if "location" in header: + #Direct download + print "Direct Download: " + header['location'] + self.download(header['location']) + else: + #Indirect download + self.html = self.load("http://uploaded.net/file/%s" % self.fileID) + found = re.search(r'
    (\d+) seconds", self.html) + if not found: + self.fail("File not downloadable for free users") + self.setWait(int(found.group(1))) + + js = self.load("http://uploaded.net/js/download.js", decode=True) + + challengeId = re.search(r'Recaptcha\.create\("([^"]+)', js) + + url = "http://uploaded.net/io/ticket/captcha/%s" % self.fileID + downloadURL = "" + + for i in range(5): + #self.req.lastURL = str(self.url) + re_captcha = ReCaptcha(self) + challenge, result = re_captcha.challenge(challengeId.group(1)) + options = {"recaptcha_challenge_field": challenge, "recaptcha_response_field": result} + self.wait() + + result = self.load(url, post=options) + self.logDebug("result: %s" % result) + + if "limit-size" in result: + self.fail("File too big for free download") + elif "limit-slot" in result: # Temporary restriction so just wait a bit + self.setWait(30 * 60, True) + self.wait() + self.retry() + elif "limit-parallel" in result: + self.fail("Cannot download in parallel") + elif self.DL_LIMIT_PATTERN in result: # limit-dl + self.setWait(3 * 60 * 60, True) + self.wait() + self.retry() + elif 'err:"captcha"' in result: + self.logError("ul.net captcha is disabled") + self.invalidCaptcha() + elif "type:'download'" in result: + self.correctCaptcha() + downloadURL = re.search("url:'([^']+)", result).group(1) + break + else: + self.fail("Unknown error '%s'") + + if not downloadURL: + self.fail("No Download url retrieved/all captcha attempts failed") + + self.download(downloadURL, disposition=True) + check = self.checkDownload({"limit-dl": self.DL_LIMIT_PATTERN}) + if check == "limit-dl": + self.setWait(3 * 60 * 60, True) + self.wait() + self.retry() diff --git a/pyload/plugins/hoster/UploadheroCom.py b/pyload/plugins/hoster/UploadheroCom.py new file mode 100644 index 000000000..502f849af --- /dev/null +++ b/pyload/plugins/hoster/UploadheroCom.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +""" + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3 of the License, + or (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, see . + + @author: zoidberg +""" + +import re +from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo + +class UploadheroCom(SimpleHoster): + __name__ = "UploadheroCom" + __type__ = "hoster" + __pattern__ = r"http://(?:www\.)?uploadhero\.com?/dl/\w+" + __version__ = "0.13" + __description__ = """UploadHero.com plugin""" + __author_name__ = ("mcmyst", "zoidberg") + __author_mail__ = ("mcmyst@hotmail.fr", "zoidberg@mujmail.cz") + + SH_COOKIES = [("http://uploadhero.com", "lang", "en")] + FILE_NAME_PATTERN = r'
(?P<N>.*?)' + FILE_SIZE_PATTERN = r'Taille du fichier : (?P<S>.*?)' + FILE_OFFLINE_PATTERN = r'|
    Le lien du fichier ci-dessus n\'existe plus.' + + DOWNLOAD_URL_PATTERN = r'(\d+).*\s*(\d+)' + + CAPTCHA_PATTERN = r'"(/captchadl\.php\?[a-z0-9]+)"' + FREE_URL_PATTERN = r'var magicomfg = \'"/]+)"' + + def handleFree(self): + self.checkErrors() + + found = re.search(self.CAPTCHA_PATTERN, self.html) + if not found: self.parseError("Captcha URL") + captcha_url = "http://uploadhero.com" + found.group(1) + + for i in range(5): + captcha = self.decryptCaptcha(captcha_url) + self.html = self.load(self.pyfile.url, get = {"code": captcha}) + found = re.search(self.FREE_URL_PATTERN, self.html) + if found: + self.correctCaptcha() + download_url = found.group(1) or found.group(2) + break + else: + self.invalidCaptcha() + else: + self.fail("No valid captcha code entered") + + self.download(download_url) + + def handlePremium(self): + self.log.debug("%s: Use Premium Account" % self.__name__) + self.html = self.load(self.pyfile.url) + link = re.search(self.DOWNLOAD_URL_PATTERN, self.html).group(1) + self.log.debug("Downloading link : '%s'" % link) + self.download(link) + + def checkErrors(self): + found = re.search(self.IP_BLOCKED_PATTERN, self.html) + if found: + self.html = self.load("http://uploadhero.com%s" % found.group(1)) + + found = re.search(self.IP_WAIT_PATTERN, self.html) + wait_time = (int(found.group(1)) * 60 + int(found.group(2))) if found else 300 + self.setWait(wait_time, True) + self.wait() + self.retry() + +getInfo = create_getInfo(UploadheroCom) diff --git a/pyload/plugins/hoster/UploadingCom.py b/pyload/plugins/hoster/UploadingCom.py new file mode 100644 index 000000000..84040eb4d --- /dev/null +++ b/pyload/plugins/hoster/UploadingCom.py @@ -0,0 +1,110 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +""" + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3 of the License, + or (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, see . + + @author: jeix +""" + +import re +from pycurl import HTTPHEADER +from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo, timestamp +from module.common.json_layer import json_loads + +class UploadingCom(SimpleHoster): + __name__ = "UploadingCom" + __type__ = "hoster" + __pattern__ = r"http://(?:www\.)?uploading\.com/files/(?:get/)?(?P[\w\d]+)" + __version__ = "0.32" + __description__ = """Uploading.Com File Download Hoster""" + __author_name__ = ("jeix", "mkaay", "zoidberg") + __author_mail__ = ("jeix@hasnomail.de", "mkaay@mkaay.de", "zoidberg@mujmail.cz") + + FILE_NAME_PATTERN = r'Download (?P<N>.*?) 
for free on uploading.com' + FILE_SIZE_PATTERN = r'File size: (?P.*?)' + FILE_OFFLINE_PATTERN = r'The requested file is not found' + + def process(self, pyfile): + # set lang to english + self.req.cj.setCookie("uploading.com", "lang", "1") + self.req.cj.setCookie("uploading.com", "language", "1") + self.req.cj.setCookie("uploading.com", "setlang", "en") + self.req.cj.setCookie("uploading.com", "_lang", "en") + + if not "/get/" in self.pyfile.url: + self.pyfile.url = self.pyfile.url.replace("/files", "/files/get") + + self.html = self.load(pyfile.url, decode = True) + self.file_info = self.getFileInfo() + + if self.premium: + self.handlePremium() + else: + self.handleFree() + + def handlePremium(self): + postData = {'action': 'get_link', + 'code': self.file_info['ID'], + 'pass': 'undefined'} + + self.html = self.load('http://uploading.com/files/get/?JsHttpRequest=%d-xml' % timestamp(), post=postData) + url = re.search(r'"link"\s*:\s*"(.*?)"', self.html) + if url: + url = url.group(1).replace("\\/", "/") + self.download(url) + + raise Exception("Plugin defect.") + + def handleFree(self): + found = re.search('
((Daily )?Download Limit)
    ', self.html) + if found: + self.pyfile.error = found.group(1) + self.logWarning(self.pyfile.error) + self.retry(max_tries=6, wait_time = 21600 if found.group(2) else 900, reason = self.pyfile.error) + + ajax_url = "http://uploading.com/files/get/?ajax" + self.req.http.c.setopt(HTTPHEADER, ["X-Requested-With: XMLHttpRequest"]) + self.req.http.lastURL = self.pyfile.url + + response = json_loads(self.load(ajax_url, post = {'action': 'second_page', 'code': self.file_info['ID']})) + if 'answer' in response and 'wait_time' in response['answer']: + wait_time = int(response['answer']['wait_time']) + self.log.info("%s: Waiting %d seconds." % (self.__name__, wait_time)) + self.setWait(wait_time) + self.wait() + else: + self.pluginParseError("AJAX/WAIT") + + response = json_loads(self.load(ajax_url, post = {'action': 'get_link', 'code': self.file_info['ID'], 'pass': 'false'})) + if 'answer' in response and 'link' in response['answer']: + url = response['answer']['link'] + else: + self.pluginParseError("AJAX/URL") + + self.html = self.load(url) + found = re.search(r'\s*Download File\s*]*>(?P[^>]+)\s*[^\(]*\((?P[^\)]+)\)' + FILE_OFFLINE_PATTERN = r'
    File Not Found
    ' + HOSTER_NAME = "uptobox.com" + + def setup(self): + self.resumeDownload = self.multiDL = self.premium + self.chunkLimit = 1 + +getInfo = create_getInfo(UptoboxCom) \ No newline at end of file diff --git a/pyload/plugins/hoster/VeehdCom.py b/pyload/plugins/hoster/VeehdCom.py new file mode 100644 index 000000000..23048b831 --- /dev/null +++ b/pyload/plugins/hoster/VeehdCom.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +import re +from module.plugins.Hoster import Hoster + +class VeehdCom(Hoster): + __name__ = 'VeehdCom' + __type__ = 'hoster' + __pattern__ = r'http://veehd\.com/video/\d+_\S+' + __config__ = [ + ('filename_spaces', 'bool', "Allow spaces in filename", 'False'), + ('replacement_char', 'str', "Filename replacement character", '_'), + ] + __version__ = '0.2' + __description__ = """Veehd.com Download Hoster""" + __author_name__ = ('cat') + __author_mail__ = ('cat@pyload') + + def _debug(self, msg): + self.log.debug('[%s] %s' % (self.__name__, msg)) + + def setup(self): + self.html = None + self.multiDL = True + self.req.canContinue = True + + def process(self, pyfile): + self.download_html() + if not self.file_exists(): + self.offline() + + pyfile.name = self.get_file_name() + self.download(self.get_file_url()) + + def download_html(self): + url = self.pyfile.url + self._debug("Requesting page: %s" % (repr(url),)) + self.html = self.load(url) + + def file_exists(self): + if self.html is None: + self.download_html() + + if 'Veehd' in self.html: + return False + return True + + def get_file_name(self): + if self.html is None: + self.download_html() + + match = re.search(r']*>([^<]+) on Veehd', self.html) + if not match: + self.fail("video title not found") + name = match.group(1) + + # replace unwanted characters in filename + if self.getConf('filename_spaces'): + pattern = '[^0-9A-Za-z\.\ ]+' + else: + pattern = '[^0-9A-Za-z\.]+' + + name = re.sub(pattern, self.getConf('replacement_char'), + name) + return name + '.avi' + + def get_file_url(self): + """ returns the absolute downloadable filepath + """ + if self.html is None: + self.download_html() + + match = re.search(r'. + + @author: zoidberg +""" + +#similar to coolshare.cz (down) + +import re +from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo +from module.network.HTTPRequest import BadHeader +from module.utils import html_unescape + +class WarserverCz(SimpleHoster): + __name__ = "WarserverCz" + __type__ = "hoster" + __pattern__ = r"http://(?:\w*\.)?warserver.cz/stahnout/(?P\d+)/.+" + __version__ = "0.12" + __description__ = """Warserver.cz""" + __author_name__ = ("zoidberg") + + FILE_NAME_PATTERN = r'(?P[^<]+)' + FILE_SIZE_PATTERN = r'
Velikost: (?P<S>[^<]+)' + FILE_OFFLINE_PATTERN = r'Soubor nenalezen
    ' + + PREMIUM_URL_PATTERN = r'href="(http://[^/]+/dwn-premium.php.*?)"' + DOMAIN = "http://csd01.coolshare.cz" + + DOMAIN = "http://s01.warserver.cz" + + def handleFree(self): + try: + self.download("%s/dwn-free.php?fid=%s" % (self.DOMAIN, self.file_info['ID'])) + except BadHeader, e: + self.logError(e) + if e.code == 403: + self.longWait(60,60) + else: raise + self.checkDownloadedFile() + + def handlePremium(self): + found = re.search(self.PREMIUM_URL_PATTERN, self.html) + if not found: self.parseError("Premium URL") + url = html_unescape(found.group(1)) + self.logDebug("Premium URL: " + url) + if not url.startswith("http://"): self.resetAccount() + self.download(url) + self.checkDownloadedFile() + + def checkDownloadedFile(self): + check = self.checkDownload({ + "offline": ">404 Not Found<" + }) + + if check == "offline": + self.offline() + +getInfo = create_getInfo(WarserverCz) \ No newline at end of file diff --git a/pyload/plugins/hoster/WebshareCz.py b/pyload/plugins/hoster/WebshareCz.py new file mode 100644 index 000000000..195e65a93 --- /dev/null +++ b/pyload/plugins/hoster/WebshareCz.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +""" + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3 of the License, + or (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, see . + + @author: zoidberg +""" + +import re +from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo +from module.network.HTTPRequest import BadHeader + +class WebshareCz(SimpleHoster): + __name__ = "WebshareCz" + __type__ = "hoster" + __pattern__ = r"http://(\w+\.)?webshare.cz/(stahnout/)?(?P\w{10})-.+" + __version__ = "0.12" + __description__ = """WebShare.cz""" + __author_name__ = ("zoidberg") + + FILE_NAME_PATTERN = r'
Stahujete soubor:\s*(?P<N>[^<]+)' + FILE_SIZE_PATTERN = r'Velikost souboru je:\s*(?P<S>[^<]+)' + FILE_OFFLINE_PATTERN = r'Soubor ".*?" nebyl nalezen.
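# WarserverCz.handleFree above treats an HTTP 403 as temporary server
# overload: it catches BadHeader, reads the status from e.code, and backs
# off instead of failing. A stripped-down sketch of that shape (the helper
# name is hypothetical; Python 2 syntax as in the surrounding code):

from module.network.HTTPRequest import BadHeader

def download_with_backoff(plugin, url):
    try:
        plugin.download(url)
    except BadHeader, e:
        plugin.logError(e)
        if e.code == 403:
            plugin.longWait(60, 60)  # wait, then retry later
        else:
            raise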
    ' + + DOWNLOAD_LINK_PATTERN = r'id="download_link" href="(?P.*?)"' + + def setup(self): + self.multiDL = True + + def handleFree(self): + url_a = re.search(r"(var l.*)", self.html).group(1) + url_b = re.search(r"(var keyStr.*)", self.html).group(1) + url = self.js.eval("%s\n%s\ndec(l)" % (url_a, url_b)) + + self.logDebug('Download link: ' + url) + self.download(url) + +getInfo = create_getInfo(WebshareCz) \ No newline at end of file diff --git a/pyload/plugins/hoster/WrzucTo.py b/pyload/plugins/hoster/WrzucTo.py new file mode 100644 index 000000000..4a5e89f22 --- /dev/null +++ b/pyload/plugins/hoster/WrzucTo.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +""" + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3 of the License, + or (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, see . + + @author: zoidberg +""" + +import re +from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo +from pycurl import HTTPHEADER + +class WrzucTo(SimpleHoster): + __name__ = "WrzucTo" + __type__ = "hoster" + __pattern__ = r"http://(?:\w+\.)*?wrzuc\.to/([a-zA-Z0-9]+(\.wt|\.html)|(\w+/?linki/[a-zA-Z0-9]+))" + __version__ = "0.01" + __description__ = """Wrzuc.to plugin - free only""" + __author_name__ = ("zoidberg") + __author_mail__ = ("zoidberg@mujmail.cz") + + SH_COOKIES = [("http://www.wrzuc.to", "language", "en")] + FILE_SIZE_PATTERN = r'class="info">\s*\s*(?P.*?)' + FILE_NAME_PATTERN = r'id="file_info">\s*(?P.*?)' + + def setup(self): + self.multiDL = True + + def handleFree(self): + data = dict(re.findall(r'(md5|file): "(.*?)"', self.html)) + if len(data) != 2: self.parseError('File ID') + + self.req.http.c.setopt(HTTPHEADER, ["X-Requested-With: XMLHttpRequest"]) + self.req.http.lastURL = self.pyfile.url + self.load("http://www.wrzuc.to/ajax/server/prepair", post = {"md5": data['md5']}) + + self.req.http.lastURL = self.pyfile.url + self.html = self.load("http://www.wrzuc.to/ajax/server/download_link", post = {"file": data['file']}) + + data.update(re.findall(r'"(download_link|server_id)":"(.*?)"', self.html)) + if len(data) != 4: self.parseError('Download URL') + + download_url = "http://%s.wrzuc.to/pobierz/%s" % (data['server_id'], data['download_link']) + self.logDebug("Download URL: %s" % download_url) + self.download(download_url) + +getInfo = create_getInfo(WrzucTo) + diff --git a/pyload/plugins/hoster/WuploadCom.py b/pyload/plugins/hoster/WuploadCom.py new file mode 100644 index 000000000..1a0eb442b --- /dev/null +++ b/pyload/plugins/hoster/WuploadCom.py @@ -0,0 +1,241 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import re +import string +from urllib import unquote + +from module.plugins.Hoster import Hoster +from module.plugins.ReCaptcha import ReCaptcha +from module.plugins.Plugin import chunks + +from module.network.RequestFactory import getURL +from module.common.json_layer import json_loads + + +def getInfo(urls): + for chunk in chunks(urls, 20): + result = [] + ids = dict() + for url in chunk: + id = getId(url) + if id: + ids[id] = url + else: + result.append((None, 0, 1, url)) + + if len(ids) > 
0: + check_url = "http://api.wupload.com/link?method=getInfo&format=json&ids=" + ",".join(ids.keys()) + response = json_loads(getURL(check_url).decode("utf8", "ignore")) + for item in response["FSApi_Link"]["getInfo"]["response"]["links"]: + if item["status"] != "AVAILABLE": + result.append((None, 0, 1, ids[str(item["id"])])) + else: + result.append((unquote(item["filename"]), item["size"], 2, ids[str(item["id"])])) + yield result + + +def getId(url): + match = re.search(WuploadCom.FILE_ID_PATTERN, url) + if match: + return string.replace(match.group("id"), "/", "-") + else: + return None + + +class WuploadCom(Hoster): + __name__ = "WuploadCom" + __type__ = "hoster" + __pattern__ = r"http://[\w\.]*?wupload\..*?/file/(([a-z][0-9]+/)?[0-9]+)(/.*)?" + __version__ = "0.20" + __description__ = """Wupload com""" + __author_name__ = ("jeix", "paulking") + __author_mail__ = ("jeix@hasnomail.de", "") + + API_ADDRESS = "http://api.wupload.com" + URL_DOMAIN_PATTERN = r'(?P.*?)(?P.wupload\..+?)(?P/.*)' + FILE_ID_PATTERN = r'/file/(?P([a-z][0-9]+/)?[0-9]+)(/.*)?' + FILE_LINK_PATTERN = r'
Download Link\s*]*>([^<]+)' + OVR_KILL_LINK_PATTERN = r'Delete Link\s*]*>([^<]+)' + CAPTCHA_URL_PATTERN = r'(http://[^"\']+?/captchas?/[^"\']+)' + RECAPTCHA_URL_PATTERN = r'http://[^"\']+?recaptcha[^"\']+?\?k=([^"\']+)"' + CAPTCHA_DIV_PATTERN = r'Enter code.*?(.*?)
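# Both UploadedTo (above) and WuploadCom (below) drive reCAPTCHA the same
# way: fetch a challenge for the site key, then POST the solved pair back
# under the two standard field names. A minimal sketch (the helper name is
# hypothetical):

from module.plugins.ReCaptcha import ReCaptcha

def solve_recaptcha(plugin, site_key):
    rc = ReCaptcha(plugin)
    challenge, response = rc.challenge(site_key)
    return {"recaptcha_challenge_field": challenge,
            "recaptcha_response_field": response}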
    Download Now' + WAIT_TIME_PATTERN = r'countDownDelay = (?P\d+)' + WAIT_TM_PATTERN = r"name='tm' value='(.*?)' />" + WAIT_TM_HASH_PATTERN = r"name='tm_hash' value='(.*?)' />" + CAPTCHA_TYPE1_PATTERN = r'Recaptcha.create\("(.*?)",' + CAPTCHA_TYPE2_PATTERN = r'id="recaptcha_image"> 300: + self.wantReconnect = True + + self.setWait(wait) + self.logDebug("Waiting %d seconds." % wait) + self.wait() + + tm = re.search(self.WAIT_TM_PATTERN, self.html) + tm_hash = re.search(self.WAIT_TM_HASH_PATTERN, self.html) + + if tm and tm_hash: + tm = tm.group(1) + tm_hash = tm_hash.group(1) + self.html = self.load(url, post={"tm": tm, "tm_hash": tm_hash}) + self.handleErrors() + break + else: + self.html = self.load(url) + self.handleErrors() + waitSearch = re.search(self.WAIT_TIME_PATTERN, self.html) + + def handleErrors(self): + if "This file is available for premium users only." in self.html: + self.fail("need premium account for file") + + if "The file that you're trying to download is larger than" in self.html: + self.fail("need premium account for file") + + if "Free users may only download 1 file at a time" in self.html: + self.fail("only 1 file at a time for free users") + + if "Free user can not download files" in self.html: + self.fail("need premium account for file") + + if "Download session in progress" in self.html: + self.fail("already downloading") + + if "This file is password protected" in self.html: + self.fail("This file is password protected") + + if "An Error Occurred" in self.html: + self.fail("A server error occured.") + + if "This file was deleted" in self.html: + self.offline() diff --git a/pyload/plugins/hoster/X7To.py b/pyload/plugins/hoster/X7To.py new file mode 100644 index 000000000..965d84543 --- /dev/null +++ b/pyload/plugins/hoster/X7To.py @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- +import re + +from module.plugins.Hoster import Hoster + +from module.network.RequestFactory import getURL + +def getInfo(urls): + yield [(url, 0, 1, url) for url in urls] + + +class X7To(Hoster): + __name__ = "X7To" + __type__ = "hoster" + __pattern__ = r"http://(?:www.)?x7.to/" + __version__ = "0.3" + __description__ = """X7.To File Download Hoster""" + __author_name__ = ("ernieb") + __author_mail__ = ("ernieb") + + FILE_INFO_PATTERN=r'', self.html) + if overLimit: + self.logDebug("over limit, falling back to free") + self.handleFree() + else: + realurl = re.search(r'', self.html) + if realurl: + realurl = realurl.group(1) + self.logDebug("premium url found %s" % realurl) + else: + self.logDebug("premium link not found") + self.download(realurl) + + def handleFree(self): + # find file id + file_id = re.search(r"var dlID = '(.*?)'", self.html) + if not file_id: + self.fail("Free download id not found") + + file_url = "http://x7.to/james/ticket/dl/" + file_id.group(1) + self.logDebug("download id %s" % file_id.group(1)) + + self.html = self.load(file_url, ref=False, decode=True) + + # deal with errors + if "limit-dl" in self.html: + self.logDebug("Limit reached ... 
waiting") + self.setWait(900,True) + self.wait() + self.retry() + + if "limit-parallel" in self.html: + self.fail("Cannot download in parallel") + + # no waiting required, go to download + waitCheck = re.search(r"wait:(\d*),", self.html) + if waitCheck: + waitCheck = int(waitCheck.group(1)) + self.setWait(waitCheck) + self.wait() + + urlCheck = re.search(r"url:'(.*?)'", self.html) + url = None + if urlCheck: + url = urlCheck.group(1) + self.logDebug("free url found %s" % url) + + if url: + try: + self.download(url) + except: + self.logDebug("downloading url failed: %s" % url) + else: + self.fail("Free download url found") diff --git a/pyload/plugins/hoster/XFileSharingPro.py b/pyload/plugins/hoster/XFileSharingPro.py new file mode 100644 index 000000000..1120a2a8b --- /dev/null +++ b/pyload/plugins/hoster/XFileSharingPro.py @@ -0,0 +1,314 @@ +# -*- coding: utf-8 -*- +""" + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3 of the License, + or (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, see . + + @author: zoidberg +""" + +import re +from random import random +from urllib import unquote +from urlparse import urlparse +from pycurl import FOLLOWLOCATION, LOW_SPEED_TIME +from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo, PluginParseError +from module.plugins.ReCaptcha import ReCaptcha +from module.plugins.internal.CaptchaService import SolveMedia, AdsCaptcha +from module.utils import html_unescape + +class XFileSharingPro(SimpleHoster): + """ + Common base for XFileSharingPro hosters like EasybytezCom, CramitIn, FiledinoCom... + Some hosters may work straight away when added to __pattern__ + However, most of them will NOT work because they are either down or running a customized version + """ + __name__ = "XFileSharingPro" + __type__ = "hoster" + __pattern__ = r"^unmatchable$" + __version__ = "0.17" + __description__ = """XFileSharingPro common hoster base""" + __author_name__ = ("zoidberg") + __author_mail__ = ("zoidberg@mujmail.cz") + + FILE_NAME_PATTERN = r'[^<]+ \((?P[^<]+)\)' + FILE_INFO_PATTERN = r'Filename:(?P[^<]+)\s*.*?\((?P[^<]+)\)' + FILE_OFFLINE_PATTERN = r'<(b|h[1-6])>File Not Found' + + WAIT_PATTERN = r'.*?>(\d+)' + LONG_WAIT_PATTERN = r'(?P\d+(?=\s*hour))?.*?(?P\d+(?=\s*minute))?.*?(?P\d+(?=\s*second))?' + OVR_DOWNLOAD_LINK_PATTERN = r'
  • ' + SOLVEMEDIA_PATTERN = r'http:\/\/api\.solvemedia\.com\/papi\/challenge\.script\?k=(.*?)"' + ERROR_PATTERN = r'class=["\']err["\'][^>]*>(.*?)"," ",self.errmsg)) + + if 'wait' in self.errmsg: + wait_time = sum([int(v) * {"hour": 3600, "minute": 60, "second": 1}[u] for v, u in re.findall('(\d+)\s*(hour|minute|second)?', self.errmsg)]) + self.setWait(wait_time, True) + self.wait() + elif 'captcha' in self.errmsg: + self.invalidCaptcha() + elif 'premium' in self.errmsg and 'require' in self.errmsg: + self.fail("File can be downloaded by premium users only") + elif 'limit' in self.errmsg: + self.setWait(3600, True) + self.wait() + self.retry(25) + elif 'countdown' in self.errmsg or 'Expired session' in self.errmsg: + self.retry(3) + elif 'maintenance' in self.errmsg: + self.tempOffline() + elif 'download files up to' in self.errmsg: + self.fail("File too large for free download") + else: + self.fail(self.errmsg) + + else: + self.errmsg = None + + return self.errmsg + + def getPostParameters(self): + for i in range(3): + if not self.errmsg: self.checkErrors() + + if hasattr(self,"FORM_PATTERN"): + action, inputs = self.parseHtmlForm(self.FORM_PATTERN) + else: + action, inputs = self.parseHtmlForm(input_names={"op": re.compile("^download")}) + + if not inputs: + action, inputs = self.parseHtmlForm('F1') + if not inputs: + if self.errmsg: + self.retry() + else: + self.parseError("Form not found") + + self.logDebug(self.HOSTER_NAME, inputs) + + if 'op' in inputs and inputs['op'] in ('download2', 'download3'): + if "password" in inputs: + if self.passwords: + inputs['password'] = self.passwords.pop(0) + else: + self.fail("No or invalid passport") + + if not self.premium: + found = re.search(self.WAIT_PATTERN, self.html) + if found: + wait_time = int(found.group(1)) + 1 + self.setWait(wait_time, False) + else: + wait_time = 0 + + self.captcha = self.handleCaptcha(inputs) + + if wait_time: self.wait() + + self.errmsg = None + return inputs + + else: + inputs['referer'] = self.pyfile.url + + if self.premium: + inputs['method_premium'] = "Premium Download" + if 'method_free' in inputs: del inputs['method_free'] + else: + inputs['method_free'] = "Free Download" + if 'method_premium' in inputs: del inputs['method_premium'] + + self.html = self.load(self.pyfile.url, post = inputs, ref = True) + self.errmsg = None + + else: self.parseError('FORM: %s' % (inputs['op'] if 'op' in inputs else 'UNKNOWN')) + + def handleCaptcha(self, inputs): + found = re.search(self.RECAPTCHA_URL_PATTERN, self.html) + if found: + recaptcha_key = unquote(found.group(1)) + self.logDebug("RECAPTCHA KEY: %s" % recaptcha_key) + recaptcha = ReCaptcha(self) + inputs['recaptcha_challenge_field'], inputs['recaptcha_response_field'] = recaptcha.challenge(recaptcha_key) + return 1 + else: + found = re.search(self.CAPTCHA_URL_PATTERN, self.html) + if found: + captcha_url = found.group(1) + inputs['code'] = self.decryptCaptcha(captcha_url) + return 2 + else: + found = re.search(self.CAPTCHA_DIV_PATTERN, self.html, re.S) + if found: + captcha_div = found.group(1) + self.logDebug(captcha_div) + numerals = re.findall('(\d)', html_unescape(captcha_div)) + inputs['code'] = "".join([a[1] for a in sorted(numerals, key = lambda num: int(num[0]))]) + self.logDebug("CAPTCHA", inputs['code'], numerals) + return 3 + else: + found = re.search(self.SOLVEMEDIA_PATTERN, self.html) + if found: + captcha_key = found.group(1) + captcha = SolveMedia(self) + inputs['adcopy_challenge'], inputs['adcopy_response'] = captcha.challenge(captcha_key) + return 4 + 
return 0 + +getInfo = create_getInfo(XFileSharingPro) diff --git a/pyload/plugins/hoster/XHamsterCom.py b/pyload/plugins/hoster/XHamsterCom.py new file mode 100644 index 000000000..0779a78e6 --- /dev/null +++ b/pyload/plugins/hoster/XHamsterCom.py @@ -0,0 +1,120 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import re +from module.plugins.Hoster import Hoster +from urllib import unquote +from module.common.json_layer import json_loads + +def clean_json(json_expr): + json_expr = re.sub('[\n\r]', '', json_expr) + json_expr = re.sub(' +', '', json_expr) + json_expr = re.sub('\'','"', json_expr) + + return json_expr + +class XHamsterCom(Hoster): + __name__ = "XHamsterCom" + __type__ = "hoster" + __pattern__ = r"http://(www\.)?xhamster\.com/movies/.+" + __version__ = "0.1" + __config__ = [("type", ".mp4;.flv", "Preferred type", ".mp4")] + __description__ = """XHamster.com Video Download Hoster""" + + def setup(self): + self.html = None + + def process(self, pyfile): + self.pyfile = pyfile + + if not self.file_exists(): + self.offline() + + if self.getConfig("type"): + self.desired_fmt = self.getConf("type") + + self.pyfile.name = self.get_file_name() + self.desired_fmt + self.download(self.get_file_url()) + + def download_html(self): + url = self.pyfile.url + self.html = self.load(url) + + def get_file_url(self): + """ returns the absolute downloadable filepath + """ + if self.html is None: + self.download_html() + + flashvar_pattern = re.compile('flashvars = ({.*?});', re.DOTALL) + json_flashvar=flashvar_pattern.search(self.html) + + if json_flashvar is None: + self.fail("Parse error (flashvars)") + + j = clean_json(json_flashvar.group(1)) + flashvars = json_loads(j) + + if flashvars["srv"]: + srv_url = flashvars["srv"] + '/' + else: + self.fail("Parse error (srv_url)") + + if flashvars["url_mode"]: + url_mode = flashvars["url_mode"] + else: + self.fail("Parse error (url_mode)") + + + if self.desired_fmt == ".mp4": + file_url = re.search(r"(.*?) - xHamster\.com" + file_name = re.search(file_name_pattern, self.html) + if file_name is None: + file_name_pattern = r"
(.*)" + file_name = re.search(file_name_pattern, self.html) + if file_name is None: + file_name_pattern = r"http://[www.]+xhamster\.com/movies/.*/(.*?)\.html?" + file_name = re.search(file_name_pattern, self.pyfile.url) + if file_name is None: + file_name_pattern = r"(.*)
    " + file_name = re.search(file_name_pattern, self.html) + if file_name is None: + return "Unknown" + + return file_name.group(1) + + def file_exists(self): + """ returns True or False + """ + if self.html is None: + self.download_html() + if re.search(r"(.*Video not found.*)", self.html) is not None: + return False + else: + return True diff --git a/pyload/plugins/hoster/XVideosCom.py b/pyload/plugins/hoster/XVideosCom.py new file mode 100644 index 000000000..b7f3f7b58 --- /dev/null +++ b/pyload/plugins/hoster/XVideosCom.py @@ -0,0 +1,19 @@ + +import re +import urllib + +from module.plugins.Hoster import Hoster + +class XVideosCom(Hoster): + __name__ = "XVideos.com" + __version__ = "0.1" + __pattern__ = r"http://www\.xvideos\.com/video([0-9]+)/.*" + __config__ = [] + + def process(self, pyfile): + site = self.load(pyfile.url) + pyfile.name = "%s (%s).flv" %( + re.search(r"
    ([^<]+). + + @author: jeix +""" + +from os.path import join +from os.path import exists +from os import makedirs +import re +import sys +import time +import socket, struct +from select import select +from module.utils import save_join + +from module.plugins.Hoster import Hoster + + +class Xdcc(Hoster): + __name__ = "Xdcc" + __version__ = "0.3" + __pattern__ = r'xdcc://.*?(/#?.*?)?/.*?/#?\d+/?' # xdcc://irc.Abjects.net/#channel/[XDCC]|Shit/#0004/ + __type__ = "hoster" + __config__ = [ + ("nick", "str", "Nickname", "pyload"), + ("ident", "str", "Ident", "pyloadident"), + ("realname", "str", "Realname", "pyloadreal") + ] + __description__ = """A Plugin that allows you to download from an IRC XDCC bot""" + __author_name__ = ("jeix") + __author_mail__ = ("jeix@hasnomail.com") + + def setup(self): + self.debug = 0 #0,1,2 + self.timeout = 30 + self.multiDL = False + + + + def process(self, pyfile): + # change request type + self.req = pyfile.m.core.requestFactory.getRequest(self.__name__, type="XDCC") + + self.pyfile = pyfile + for i in range(0,3): + try: + nmn = self.doDownload(pyfile.url) + self.log.debug("%s: Download of %s finished." % (self.__name__, nmn)) + return + except socket.error, e: + if hasattr(e, "errno"): + errno = e.errno + else: + errno = e.args[0] + + if errno in (10054,): + self.log.debug("XDCC: Server blocked our ip, retry in 5 min") + self.setWait(300) + self.wait() + continue + + self.fail("Failed due to socket errors. Code: %d" % errno) + + self.fail("Server blocked our ip, retry again later manually") + + + def doDownload(self, url): + self.pyfile.setStatus("waiting") # real link + + download_folder = self.config['general']['download_folder'] + location = join(download_folder, self.pyfile.package().folder.decode(sys.getfilesystemencoding())) + if not exists(location): + makedirs(location) + + m = re.search(r'xdcc://(.*?)/#?(.*?)/(.*?)/#?(\d+)/?', url) + server = m.group(1) + chan = m.group(2) + bot = m.group(3) + pack = m.group(4) + nick = self.getConf('nick') + ident = self.getConf('ident') + real = self.getConf('realname') + + temp = server.split(':') + ln = len(temp) + if ln == 2: + host, port = temp + elif ln == 1: + host, port = temp[0], 6667 + else: + self.fail("Invalid hostname for IRC Server (%s)" % server) + + + ####################### + # CONNECT TO IRC AND IDLE FOR REAL LINK + dl_time = time.time() + + sock = socket.socket() + sock.connect((host, int(port))) + if nick == "pyload": + nick = "pyload-%d" % (time.time() % 1000) # last 3 digits + sock.send("NICK %s\r\n" % nick) + sock.send("USER %s %s bla :%s\r\n" % (ident, host, real)) + time.sleep(3) + sock.send("JOIN #%s\r\n" % chan) + sock.send("PRIVMSG %s :xdcc send #%s\r\n" % (bot, pack)) + + # IRC recv loop + readbuffer = "" + done = False + retry = None + m = None + while True: + + # done is set if we got our real link + if done: + break + + if retry: + if time.time() > retry: + retry = None + dl_time = time.time() + sock.send("PRIVMSG %s :xdcc send #%s\r\n" % (bot, pack)) + + else: + if (dl_time + self.timeout) < time.time(): # todo: add in config + sock.send("QUIT :byebye\r\n") + sock.close() + self.fail("XDCC Bot did not answer") + + + fdset = select([sock], [], [], 0) + if sock not in fdset[0]: + continue + + readbuffer += sock.recv(1024) + temp = readbuffer.split("\n") + readbuffer = temp.pop() + + for line in temp: + if self.debug is 2: print "*> " + unicode(line, errors='ignore') + line = line.rstrip() + first = line.split() + + if first[0] == "PING": + sock.send("PONG %s\r\n" % first[1]) + + if 
first[0] == "ERROR": + self.fail("IRC-Error: %s" % line) + + msg = line.split(None, 3) + if len(msg) != 4: + continue + + msg = { \ + "origin":msg[0][1:], \ + "action":msg[1], \ + "target":msg[2], \ + "text" :msg[3][1:] \ + } + + + if nick == msg["target"][0:len(nick)] and "PRIVMSG" == msg["action"]: + if msg["text"] == "\x01VERSION\x01": + self.log.debug("XDCC: Sending CTCP VERSION.") + sock.send("NOTICE %s :%s\r\n" % (msg['origin'], "pyLoad! IRC Interface")) + elif msg["text"] == "\x01TIME\x01": + self.log.debug("Sending CTCP TIME.") + sock.send("NOTICE %s :%d\r\n" % (msg['origin'], time.time())) + elif msg["text"] == "\x01LAG\x01": + pass # don't know how to answer + + if not (bot == msg["origin"][0:len(bot)] + and nick == msg["target"][0:len(nick)] + and msg["action"] in ("PRIVMSG", "NOTICE")): + continue + + if self.debug is 1: + print "%s: %s" % (msg["origin"], msg["text"]) + + if "You already requested that pack" in msg["text"]: + retry = time.time() + 300 + + if "you must be on a known channel to request a pack" in msg["text"]: + self.fail("Wrong channel") + + m = re.match('\x01DCC SEND (.*?) (\d+) (\d+)(?: (\d+))?\x01', msg["text"]) + if m: + done = True + + # get connection data + ip = socket.inet_ntoa(struct.pack('L', socket.ntohl(int(m.group(2))))) + port = int(m.group(3)) + packname = m.group(1) + + if len(m.groups()) > 3: + self.req.filesize = int(m.group(4)) + + self.pyfile.name = packname + filename = save_join(location, packname) + self.log.info("XDCC: Downloading %s from %s:%d" % (packname, ip, port)) + + self.pyfile.setStatus("downloading") + newname = self.req.download(ip, port, filename, sock, self.pyfile.setProgress) + if newname and newname != filename: + self.log.info("%(name)s saved as %(newname)s" % {"name": self.pyfile.name, "newname": newname}) + filename = newname + + # kill IRC socket + # sock.send("QUIT :byebye\r\n") + sock.close() + + self.lastDownload = filename + return self.lastDownload + diff --git a/pyload/plugins/hoster/XvidstageCom.py b/pyload/plugins/hoster/XvidstageCom.py new file mode 100644 index 000000000..4962c05af --- /dev/null +++ b/pyload/plugins/hoster/XvidstageCom.py @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- + +""" + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3 of the License, + or (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, see . + + @author: 4Christopher +""" + +import re +import HTMLParser + +from module.plugins.Hoster import Hoster +from module.network.RequestFactory import getURL + + +def setup(self): + self.wantReconnect = False + self.resumeDownload = True + self.multiDL = True + + +def getInfo(urls): + result = [] + + for url in urls: + result.append(parseFileInfo(url, getInfoMode=True)) + yield result + + +def parseFileInfo(url, getInfoMode=False): + html = getURL(url) + info = {"name": url, "size": 0, "status": 3} + try: + info['name'] = re.search(r'(?:Filename|Dateiname):]*?>(.*?)<', html).group(1) + info['size'] = re.search(r'(?:Size|Größe):.*? \((\d+?) 
bytes\)', html).group(1) + except: ## The file is offline + info['status'] = 1 + else: + info['status'] = 2 + + if getInfoMode: + return info['name'], info['size'], info['status'], url + else: + return info['name'], info['size'], info['status'], html + + +class XvidstageCom(Hoster): + __name__ = 'XvidstageCom' + __version__ = '0.4' + __pattern__ = r'http://(?:www.)?xvidstage.com/(?P[0-9A-Za-z]+)' + __type__ = 'hoster' + __description__ = """A Plugin that allows you to download files from http://xvidstage.com""" + __author_name__ = ('4Christopher') + __author_mail__ = ('4Christopher@gmx.de') + + + def process(self, pyfile): + pyfile.name, pyfile.size, pyfile.status, self.html = parseFileInfo(pyfile.url) + self.logDebug('Name: %s' % pyfile.name) + if pyfile.status == 1: ## offline + self.offline() + self.id = re.search(self.__pattern__, pyfile.url).group('id') + + wait_sec = int(re.search(r'countdown_str">.+?>(\d+?)<', self.html).group(1)) + self.setWait(wait_sec, reconnect=False) + self.logDebug('Waiting %d seconds before submitting the captcha' % wait_sec) + self.wait() + + rand = re.search(r'', self.html).group(1) + self.logDebug('rand: %s, id: %s' % (rand, self.id)) + self.html = self.req.load(pyfile.url, + post={'op': 'download2', 'id': self.id, 'rand': rand, 'code': self.get_captcha()}) + file_url = re.search(r'(?P=url)', self.html).group('url') + try: + hours_file_available = int( + re.search(r'This direct link will be available for your IP next (?P\d+?) hours', + self.html).group('hours')) + self.logDebug( + 'You have %d hours to download this file with your current IP address.' % hours_file_available) + except: + self.logDebug('Failed') + self.logDebug('Download file: %s' % file_url) + self.download(file_url) + check = self.checkDownload({'empty': re.compile(r'^$')}) + + if check == 'empty': + self.logInfo('Downloaded File was empty') + # self.retry() + + def get_captcha(self): + ## 1 … + cap_chars = {} + for pad_left, char in re.findall(r"position:absolute;padding-left:(\d+?)px;.*?;'>(.*?)<", self.html): + cap_chars[int(pad_left)] = char + + h = HTMLParser.HTMLParser() + ## Sorting after padding-left + captcha = '' + for pad_left in sorted(cap_chars): + captcha += h.unescape(cap_chars[pad_left]) + + self.logDebug('The captcha is: %s' % captcha) + return captcha diff --git a/pyload/plugins/hoster/YibaishiwuCom.py b/pyload/plugins/hoster/YibaishiwuCom.py new file mode 100644 index 000000000..901225944 --- /dev/null +++ b/pyload/plugins/hoster/YibaishiwuCom.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +""" + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3 of the License, + or (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, see . 
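# XvidstageCom.get_captcha above reassembles the captcha from absolutely
# positioned glyphs: each character carries a padding-left offset, and
# sorting by offset restores the reading order. Self-contained sketch with
# made-up sample data:

import HTMLParser

cap_chars = {48: '7', 16: '2', 32: '&#53;', 0: '1'}  # pad_left -> char/entity
h = HTMLParser.HTMLParser()
print ''.join(h.unescape(cap_chars[p]) for p in sorted(cap_chars))  # prints 1257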
+ + @author: zoidberg +""" + +import re +from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo +from module.common.json_layer import json_loads + +class YibaishiwuCom(SimpleHoster): + __name__ = "YibaishiwuCom" + __type__ = "hoster" + __pattern__ = r"http://(?:www\.)?(?:u\.)?115.com/file/(?P\w+)" + __version__ = "0.12" + __description__ = """115.com""" + __author_name__ = ("zoidberg") + + FILE_NAME_PATTERN = r"file_name: '(?P[^']+)'" + FILE_SIZE_PATTERN = r"file_size: '(?P[^']+)'" + FILE_OFFLINE_PATTERN = ur'
哎呀!提取码不存在!不妨搜搜看吧!
    ' + + AJAX_URL_PATTERN = r'(/\?ct=(pickcode|download)[^"\']+)' + + def handleFree(self): + found = re.search(self.AJAX_URL_PATTERN, self.html) + if not found: self.parseError("AJAX URL") + url = found.group(1) + self.logDebug(('FREEUSER' if found.group(2) == 'download' else 'GUEST') + ' URL', url) + + response = json_loads(self.load("http://115.com" + url, decode = False)) + for mirror in (response['urls'] if 'urls' in response else response['data'] if 'data' in response else []): + try: + url = mirror['url'].replace('\\','') + self.logDebug("Trying URL: " + url) + self.download(url) + break + except: + continue + else: self.fail('No working link found') + +getInfo = create_getInfo(YibaishiwuCom) diff --git a/pyload/plugins/hoster/YoupornCom.py b/pyload/plugins/hoster/YoupornCom.py new file mode 100644 index 000000000..b17a4ef80 --- /dev/null +++ b/pyload/plugins/hoster/YoupornCom.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import re +from module.plugins.Hoster import Hoster + +class YoupornCom(Hoster): + __name__ = "YoupornCom" + __type__ = "hoster" + __pattern__ = r"http://(www\.)?youporn\.com/watch/.+" + __version__ = "0.2" + __description__ = """Youporn.com Video Download Hoster""" + __author_name__ = ("willnix") + __author_mail__ = ("willnix@pyload.org") + + def setup(self): + self.html = None + + def process(self, pyfile): + self.pyfile = pyfile + + if not self.file_exists(): + self.offline() + + self.pyfile.name = self.get_file_name() + self.download(self.get_file_url()) + + def download_html(self): + url = self.pyfile.url + self.html = self.load(url, post={"user_choice":"Enter"}, cookies=False) + + def get_file_url(self): + """ returns the absolute downloadable filepath + """ + if self.html is None: + self.download_html() + + file_url = re.search(r'(http://download\.youporn\.com/download/\d+\?save=1)">', self.html).group(1) + return file_url + + def get_file_name(self): + if self.html is None: + self.download_html() + + file_name_pattern = r"(.*) - Free Porn Videos - YouPorn" + return re.search(file_name_pattern, self.html).group(1).replace("&", "&").replace("/","") + '.flv' + + def file_exists(self): + """ returns True or False + """ + if self.html is None: + self.download_html() + if re.search(r"(.*invalid video_id.*)", self.html) is not None: + return False + else: + return True diff --git a/pyload/plugins/hoster/YourfilesTo.py b/pyload/plugins/hoster/YourfilesTo.py new file mode 100644 index 000000000..7a8f327ca --- /dev/null +++ b/pyload/plugins/hoster/YourfilesTo.py @@ -0,0 +1,83 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import re +import urllib +from module.plugins.Hoster import Hoster + +class YourfilesTo(Hoster): + __name__ = "YourfilesTo" + __type__ = "hoster" + __pattern__ = r"(http://)?(www\.)?yourfiles\.(to|biz)/\?d=[a-zA-Z0-9]+" + __version__ = "0.2" + __description__ = """Youfiles.to Download Hoster""" + __author_name__ = ("jeix", "skydancer") + __author_mail__ = ("jeix@hasnomail.de", "skydancer@hasnomail.de") + + def setup(self): + self.html = None + self.multiDL = True + + def process(self,pyfile): + self.pyfile = pyfile + self.prepare() + self.download(self.get_file_url()) + + def prepare(self): + if not self.file_exists(): + self.offline() + + self.pyfile.name = self.get_file_name() + + wait_time = self.get_waiting_time() + self.setWait(wait_time) + self.log.debug("%s: Waiting %d seconds." 
% (self.__name__,wait_time)) + self.wait() + + def get_waiting_time(self): + if self.html is None: + self.download_html() + + #var zzipitime = 15; + m = re.search(r'var zzipitime = (\d+);', self.html) + if m: + sec = int(m.group(1)) + else: + sec = 0 + + return sec + + def download_html(self): + url = self.pyfile.url + self.html = self.load(url) + + def get_file_url(self): + """ returns the absolute downloadable filepath + """ + url = re.search(r"var bla = '(.*?)';", self.html) + if url: + url = url.group(1) + url = urllib.unquote(url.replace("http://http:/http://", "http://").replace("dumdidum", "")) + return url + else: + self.fail("absolute filepath could not be found. offline? ") + + def get_file_name(self): + if self.html is None: + self.download_html() + + return re.search("(.*)", self.html).group(1) + + def file_exists(self): + """ returns True or False + """ + if self.html is None: + self.download_html() + + if re.search(r"HTTP Status 404", self.html) is not None: + return False + else: + return True + + + diff --git a/pyload/plugins/hoster/YoutubeCom.py b/pyload/plugins/hoster/YoutubeCom.py new file mode 100644 index 000000000..70db597cf --- /dev/null +++ b/pyload/plugins/hoster/YoutubeCom.py @@ -0,0 +1,164 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import re +import subprocess +import os +import os.path +from urllib import unquote + +from module.utils import html_unescape +from module.plugins.Hoster import Hoster + +def which(program): + """Works exactly like the unix command which + + Courtesy of http://stackoverflow.com/a/377028/675646""" + def is_exe(fpath): + return os.path.isfile(fpath) and os.access(fpath, os.X_OK) + + fpath, fname = os.path.split(program) + if fpath: + if is_exe(program): + return program + else: + for path in os.environ["PATH"].split(os.pathsep): + path = path.strip('"') + exe_file = os.path.join(path, program) + if is_exe(exe_file): + return exe_file + + return None + +class YoutubeCom(Hoster): + __name__ = "YoutubeCom" + __type__ = "hoster" + __pattern__ = r"https?://(?:[^/]*?)youtube\.com/watch.*?[?&]v=.*" + __version__ = "0.32" + __config__ = [("quality", "sd;hd;fullhd;240p;360p;480p;720p;1080p;3072p", "Quality Setting", "hd"), + ("fmt", "int", "FMT/ITAG Number (5-102, 0 for auto)", 0), + (".mp4", "bool", "Allow .mp4", True), + (".flv", "bool", "Allow .flv", True), + (".webm", "bool", "Allow .webm", False), + (".3gp", "bool", "Allow .3gp", False), + ("3d", "bool", "Prefer 3D", False)] + __description__ = """Youtube.com Video Download Hoster""" + __author_name__ = ("spoob", "zoidberg") + __author_mail__ = ("spoob@pyload.org", "zoidberg@mujmail.cz") + + # name, width, height, quality ranking, 3D + formats = {5: (".flv", 400, 240, 1, False), + 6: (".flv", 640, 400, 4, False), + 17: (".3gp", 176, 144, 0, False), + 18: (".mp4", 480, 360, 2, False), + 22: (".mp4", 1280, 720, 8, False), + 43: (".webm", 640, 360, 3, False), + 34: (".flv", 640, 360, 4, False), + 35: (".flv", 854, 480, 6, False), + 36: (".3gp", 400, 240, 1, False), + 37: (".mp4", 1920, 1080, 9, False), + 38: (".mp4", 4096, 3072, 10, False), + 44: (".webm", 854, 480, 5, False), + 45: (".webm", 1280, 720, 7, False), + 46: (".webm", 1920, 1080, 9, False), + 82: (".mp4", 640, 360, 3, True), + 83: (".mp4", 400, 240, 1, True), + 84: (".mp4", 1280, 720, 8, True), + 85: (".mp4", 1920, 1080, 9, True), + 100: (".webm", 640, 360, 3, True), + 101: (".webm", 640, 360, 4, True), + 102: (".webm", 1280, 720, 8, True) + } + + def setup(self): + self.resumeDownload = self.multiDL = True + + def 
process(self, pyfile): + html = self.load(pyfile.url, decode=True) + + if '
[video-unavailable page marker stripped in extraction]
    ' in html: + self.offline() + + if "We have been receiving a large volume of requests from your network." in html: + self.tempOffline() + + #get config + use3d = self.getConf("3d") + if use3d: + quality = {"sd":82,"hd":84,"fullhd":85,"240p":83,"360p":82,"480p":82,"720p":84,"1080p":85,"3072p":85} + else: + quality = {"sd":18,"hd":22,"fullhd":37,"240p":5,"360p":18,"480p":35,"720p":22,"1080p":37,"3072p":38} + desired_fmt = self.getConf("fmt") + if desired_fmt and desired_fmt not in self.formats: + self.logWarning("FMT %d unknown - using default." % desired_fmt) + desired_fmt = 0 + if not desired_fmt: + desired_fmt = quality.get(self.getConf("quality"), 18) + + #parse available streams + streams = re.search(r'"url_encoded_fmt_stream_map": "(.*?)",', html).group(1) + streams = [x.split('\u0026') for x in streams.split(',')] + streams = [dict((y.split('=',1)) for y in x) for x in streams] + streams = [(int(x['itag']), "%s&signature=%s" % (unquote(x['url']), x['sig'])) for x in streams] + #self.logDebug("Found links: %s" % streams) + self.logDebug("AVAILABLE STREAMS: %s" % [x[0] for x in streams]) + + #build dictionary of supported itags (3D/2D) + allowed = lambda x: self.getConfig(self.formats[x][0]) + streams = [x for x in streams if x[0] in self.formats and allowed(x[0])] + if not streams: + self.fail("No available stream meets your preferences") + fmt_dict = dict([x for x in streams if self.formats[x[0]][4] == use3d] or streams) + + self.logDebug("DESIRED STREAM: ITAG:%d (%s) %sfound, %sallowed" % + (desired_fmt, + "%s %dx%d Q:%d 3D:%s" % self.formats[desired_fmt], + "" if desired_fmt in fmt_dict else "NOT ", + "" if allowed(desired_fmt) else "NOT ") + ) + + #return fmt nearest to quality index + if desired_fmt in fmt_dict and allowed(desired_fmt): + fmt = desired_fmt + else: + sel = lambda x: self.formats[x][3] #select quality index + comp = lambda x, y: abs(sel(x) - sel(y)) + + self.logDebug("Choosing nearest fmt: %s" % [(x, allowed(x), comp(x, desired_fmt)) for x in fmt_dict.keys()]) + fmt = reduce(lambda x, y: x if comp(x, desired_fmt) <= comp(y, desired_fmt) and + sel(x) > sel(y) else y, fmt_dict.keys()) + + self.logDebug("Chosen fmt: %s" % fmt) + url = fmt_dict[fmt] + self.logDebug("URL: %s" % url) + + #set file name + file_suffix = self.formats[fmt][0] if fmt in self.formats else ".flv" + file_name_pattern = '' + name = re.search(file_name_pattern, html).group(1).replace("/", "") + pyfile.name = html_unescape(name) + + time = re.search(r"t=((\d+)m)?(\d+)s", pyfile.url) + ffmpeg = which("ffmpeg") + if ffmpeg and time: + m, s = time.groups()[1:] + if not m: + m = "0" + + pyfile.name += " (starting at %s:%s)" % (m, s) + pyfile.name += file_suffix + + filename = self.download(url) + + if ffmpeg and time: + inputfile = filename + "_" + os.rename(filename, inputfile) + + subprocess.call([ + ffmpeg, + "-ss", "00:%s:%s" % (m, s), + "-i", inputfile, + "-vcodec", "copy", + "-acodec", "copy", + filename]) + os.remove(inputfile) diff --git a/pyload/plugins/hoster/ZDF.py b/pyload/plugins/hoster/ZDF.py new file mode 100644 index 000000000..ea45f4fd8 --- /dev/null +++ b/pyload/plugins/hoster/ZDF.py @@ -0,0 +1,46 @@ + +import re +from xml.etree.ElementTree import fromstring + +from module.plugins.Hoster import Hoster + +XML_API = "http://www.zdf.de/ZDFmediathek/xmlservice/web/beitragsDetails?id=%i" + +class ZDF(Hoster): + # Based on zdfm by Roland Beermann + # http://github.com/enkore/zdfm/ + __name__ = "ZDF Mediathek" + __version__ = "0.7" + __pattern__ = 
r"http://www\.zdf\.de/ZDFmediathek/[^0-9]*([0-9]+)[^0-9]*" + __config__ = [] + + @staticmethod + def video_key(video): + return ( + int(video.findtext("videoBitrate", "0")), + any(f.text == "progressive" for f in video.iter("facet")), + ) + + @staticmethod + def video_valid(video): + return (video.findtext("url").startswith("http") and video.findtext("url").endswith(".mp4")) + + @staticmethod + def get_id(url): + return int(re.search(r"[^0-9]*([0-9]+)[^0-9]*", url).group(1)) + + def process(self, pyfile): + xml = fromstring(self.load(XML_API % self.get_id(pyfile.url))) + + status = xml.findtext("./status/statuscode") + if status != "ok": + self.fail("Error retrieving manifest.") + + video = xml.find("video") + title = video.findtext("information/title") + + pyfile.name = title + + target_url = sorted((v for v in video.iter("formitaet") if self.video_valid(v)), key=self.video_key)[-1].findtext("url") + + self.download(target_url) diff --git a/pyload/plugins/hoster/ZeveraCom.py b/pyload/plugins/hoster/ZeveraCom.py new file mode 100644 index 000000000..8be725d2f --- /dev/null +++ b/pyload/plugins/hoster/ZeveraCom.py @@ -0,0 +1,108 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from module.plugins.Hoster import Hoster +from module.utils import html_unescape +from urllib import quote, unquote +from time import sleep + +class ZeveraCom(Hoster): + __name__ = "ZeveraCom" + __version__ = "0.20" + __type__ = "hoster" + __pattern__ = r"http://zevera.com/.*" + __description__ = """zevera.com hoster plugin""" + __author_name__ = ("zoidberg") + __author_mail__ = ("zoidberg@mujmail.cz") + + def setup(self): + self.resumeDownload = self.multiDL = True + self.chunkLimit = 1 + + def process(self, pyfile): + if not self.account: + self.logError(_("Please enter your zevera.com account or deactivate this plugin")) + self.fail("No zevera.com account provided") + + self.logDebug("zevera.com: Old URL: %s" % pyfile.url) + + if self.account.getAPIData(self.req, cmd = "checklink", olink = pyfile.url) != "Alive": + self.fail("Offline or not downloadable - contact Zevera support") + + header = self.account.getAPIData(self.req, just_header = True, cmd="generatedownloaddirect", olink = pyfile.url) + if not "location" in header: + self.fail("Unable to initialize download - contact Zevera support") + + self.download(header['location'], disposition = True) + + check = self.checkDownload({"error" : 'action="ErrorDownload.aspx'}) + if check == "error": + self.fail("Error response received - contact Zevera support") + + """ + # BitAPI not used - defunct, probably abandoned by Zevera + + api_url = "http://zevera.com/API.ashx" + + def process(self, pyfile): + if not self.account: + self.logError(_("Please enter your zevera.com account or deactivate this plugin")) + self.fail("No zevera.com account provided") + + self.logDebug("zevera.com: Old URL: %s" % pyfile.url) + + last_size = retries = 0 + olink = self.pyfile.url #quote(self.pyfile.url.encode('utf_8')) + + for i in range(100): + self.retData = self.account.loadAPIRequest(self.req, cmd = 'download_request', olink = olink) + self.checkAPIErrors(self.retData) + + if self.retData['FileInfo']['StatusID'] == 100: + break + elif self.retData['FileInfo']['StatusID'] == 99: + self.fail('Failed to initialize download (99)') + else: + if self.retData['FileInfo']['Progress']['BytesReceived'] <= last_size: + if retries >= 6: + self.fail('Failed to initialize download (%d)' % self.retData['FileInfo']['StatusID'] ) + retries += 1 + else: + retries = 0 + + last_size = 
self.retData['FileInfo']['Progress']['BytesReceived'] + + self.setWait(self.retData['Update_Wait']) + self.wait() + + pyfile.name = self.retData['FileInfo']['RealFileName'] + pyfile.size = self.retData['FileInfo']['FileSizeInBytes'] + + self.retData = self.account.loadAPIRequest(self.req, cmd = 'download_start', FileID = self.retData['FileInfo']['FileID']) + self.checkAPIErrors(self.retData) + + self.download(self.api_url, get = { + 'cmd': "open_stream", + 'login': self.account.loginname, + 'pass': self.account.password, + 'FileID': self.retData['FileInfo']['FileID'], + 'startBytes': 0 + } + ) + + def checkAPIErrors(self, retData): + if not retData: + self.fail('Unknown API response') + + if retData['ErrorCode']: + self.logError(retData['ErrorCode'], retData['ErrorMessage']) + #self.fail('ERROR: ' + retData['ErrorMessage']) + + if self.pyfile.size / 1024000 > retData['AccountInfo']['AvailableTODAYTrafficForUseInMBytes']: + self.logWarning("Not enough data left to download the file") + + def crazyDecode(self, ustring): + # accepts decoded ie. unicode string - API response is double-quoted, double-utf8-encoded + # no idea what the proper order of calling these functions would be :-/ + return html_unescape(unquote(unquote(ustring.replace('@DELIMITER@','#'))).encode('raw_unicode_escape').decode('utf-8')) + """ \ No newline at end of file diff --git a/pyload/plugins/hoster/ZippyshareCom.py b/pyload/plugins/hoster/ZippyshareCom.py new file mode 100644 index 000000000..84974e7ba --- /dev/null +++ b/pyload/plugins/hoster/ZippyshareCom.py @@ -0,0 +1,187 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import re, subprocess, tempfile, os +from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo, timestamp +from module.plugins.ReCaptcha import ReCaptcha +from module.common.json_layer import json_loads + +class ZippyshareCom(SimpleHoster): + __name__ = "ZippyshareCom" + __type__ = "hoster" + __pattern__ = r"(?Phttp://www\d{0,2}\.zippyshare.com)/v(?:/|iew.jsp.*key=)(?P\d+)" + __version__ = "0.37" + __description__ = """Zippyshare.com Download Hoster""" + __author_name__ = ("spoob", "zoidberg") + __author_mail__ = ("spoob@pyload.org", "zoidberg@mujmail.cz") + __config__ = [("swfdump_path", "string", "Path to swfdump", "")] + + FILE_NAME_PATTERN = r'>Name:\s*]*>(?P[^<]+)
' + FILE_SIZE_PATTERN = r'>Size:<[^>]*>\s*<[^>]*>(?P<S>[0-9.,]+) (?P<U>[kKMG]+)i?B' + FILE_INFO_PATTERN = r'document\.getElementById\(\'dlbutton\'\)\.href = "[^;]*/(?P<N>[^"]+)";' + FILE_OFFLINE_PATTERN = r'>File does not exist on this server' + + DOWNLOAD_URL_PATTERN = r"…" [HTML fragments inside these patterns were stripped in extraction; the named groups are restored per the SimpleHoster N/S/U convention. The body of DOWNLOAD_URL_PATTERN, the remainder of ZippyshareCom.py, and the HTML-heavy files that followed it in the patch were lost; of the stripped HTML page only footer fragments survive: "Powered by" Bootstrap, pyLoad, "Community", "Development"]
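(Aside, not part of the patch: a minimal sketch of how SimpleHoster-style N/S/U named groups are typically consumed. parse_file_info and the exponent table are illustrative assumptions; only the group-name convention comes from the patterns above.)

import re

FILE_NAME_PATTERN = r'>Name:<[^>]*>\s*<[^>]*>(?P<N>[^<]+)'
FILE_SIZE_PATTERN = r'>Size:<[^>]*>\s*<[^>]*>(?P<S>[0-9.,]+) (?P<U>[kKMG]+)i?B'

def parse_file_info(html):
    # collect whichever named groups match on the page
    info = {}
    for pattern in (FILE_NAME_PATTERN, FILE_SIZE_PATTERN):
        found = re.search(pattern, html)
        if found:
            info.update(found.groupdict())
    # normalize decimal commas and scale the size by its unit prefix
    exponents = {'k': 1, 'K': 1, 'M': 2, 'G': 3}
    size = float(info.get('S', '0').replace(',', '.'))
    size *= 1024 ** exponents.get(info.get('U', '')[:1], 0)
    return info.get('N'), int(size)

For a page matching the patterns, parse_file_info(html) yields the display name and the size converted to bytes.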
    + + + diff --git a/pyload/web/app/scripts/app.js b/pyload/web/app/scripts/app.js new file mode 100644 index 000000000..427cb1bc8 --- /dev/null +++ b/pyload/web/app/scripts/app.js @@ -0,0 +1,105 @@ +/* + * Global Application Object + * Contains all necessary logic shared across views + */ +define([ + + // Libraries. + 'jquery', + 'underscore', + 'backbone', + 'utils/initHB', + 'utils/animations', + 'utils/lazyRequire', + 'utils/dialogs', + 'marionette', + 'bootstrap', + 'animate' + +], function($, _, Backbone, Handlebars) { + 'use strict'; + + Backbone.Marionette.TemplateCache.prototype.compileTemplate = function(rawTemplate) { + return Handlebars.compile(rawTemplate); + }; + + // TODO: configurable root + var App = new Backbone.Marionette.Application({ + root: '/' + }); + + App.addRegions({ + header: '#header', + notification: '#notification-area', + selection: '#selection-area', + content: '#content', + actionbar: '#actionbar' + }); + + App.navigate = function(url) { + return Backbone.history.navigate(url, true); + }; + + App.apiUrl = function(path) { + var url = window.hostProtocol + window.hostAddress + ':' + window.hostPort + window.pathPrefix + path; + return url; + }; + + // Add Global Helper functions + // Generates options dict that can be used for xhr requests + App.apiRequest = function(method, data, options) { + options || (options = {}); + options.url = App.apiUrl('api/' + method); + options.dataType = 'json'; + + if (data) { + options.type = 'POST'; + options.data = {}; + // Convert arguments to json + _.keys(data).map(function(key) { + options.data[key] = JSON.stringify(data[key]); + }); + } + + return options; + }; + + App.setTitle = function(name) { + var title = window.document.title; + var newTitle; + // page name separator + var index = title.indexOf('-'); + if (index >= 0) + newTitle = name + ' - ' + title.substr(index + 2, title.length); + else + newTitle = name + ' - ' + title; + + window.document.title = newTitle; + }; + + App.openWebSocket = function(path) { + // TODO + return new WebSocket(window.wsAddress.replace('%s', window.hostAddress) + path); + }; + + App.on('initialize:after', function() { +// TODO pushState variable + Backbone.history.start({ + pushState: false, + root: App.root + }); + + // All links should be handled by backbone + $(document).on('click', 'a[data-nav]', function(evt) { + var href = { prop: $(this).prop('href'), attr: $(this).attr('href') }; + var root = location.protocol + '//' + location.host + App.root; + if (href.prop.slice(0, root.length) === root) { + evt.preventDefault(); + Backbone.history.navigate(href.attr, true); + } + }); + }); + + // Returns the app object to be available to other modules through require.js. + return App; +}); \ No newline at end of file diff --git a/pyload/web/app/scripts/collections/AccountList.js b/pyload/web/app/scripts/collections/AccountList.js new file mode 100644 index 000000000..bfc2af5a3 --- /dev/null +++ b/pyload/web/app/scripts/collections/AccountList.js @@ -0,0 +1,24 @@ +define(['jquery', 'backbone', 'underscore', 'app', 'models/Account'], function($, Backbone, _, App, Account) { + 'use strict'; + + return Backbone.Collection.extend({ + + model: Account, + + comparator: function(account) { + return account.get('plugin'); + }, + + initialize: function() { + + }, + + fetch: function(options) { + // TODO: refresh options? 
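+ // App.apiRequest (see app.js above) prefixes the method with 'api/', resolves it + // against the configured host, and JSON-encodes each payload key for the POST body.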
+ options = App.apiRequest('getAccounts/false', null, options); + return Backbone.Collection.prototype.fetch.call(this, options); + } + + }); + +}); \ No newline at end of file diff --git a/pyload/web/app/scripts/collections/FileList.js b/pyload/web/app/scripts/collections/FileList.js new file mode 100644 index 000000000..873f4c0e3 --- /dev/null +++ b/pyload/web/app/scripts/collections/FileList.js @@ -0,0 +1,18 @@ +define(['jquery', 'backbone', 'underscore', 'models/File'], function($, Backbone, _, File) { + 'use strict'; + + return Backbone.Collection.extend({ + + model: File, + + comparator: function(file) { + return file.get('fileorder'); + }, + + initialize: function() { + + } + + }); + +}); \ No newline at end of file diff --git a/pyload/web/app/scripts/collections/InteractionList.js b/pyload/web/app/scripts/collections/InteractionList.js new file mode 100644 index 000000000..24f8b9248 --- /dev/null +++ b/pyload/web/app/scripts/collections/InteractionList.js @@ -0,0 +1,49 @@ +define(['jquery', 'backbone', 'underscore', 'app', 'models/InteractionTask'], + function($, Backbone, _, App, InteractionTask) { + 'use strict'; + + return Backbone.Collection.extend({ + + model: InteractionTask, + + comparator: function(task) { + return task.get('iid'); + }, + + fetch: function(options) { + options = App.apiRequest('getInteractionTasks/0', null, options); + var self = this; + options.success = function(data) { + self.set(data); + }; + + return $.ajax(options); + }, + + toJSON: function() { + var data = {queries: 0, notifications: 0}; + + this.map(function(task) { + if (task.isNotification()) + data.notifications++; + else + data.queries++; + }); + + return data; + }, + + // a task is waiting for attention (no notification) + hasTaskWaiting: function() { + var tasks = 0; + this.map(function(task) { + if (!task.isNotification()) + tasks++; + }); + + return tasks > 0; + } + + }); + + }); \ No newline at end of file diff --git a/pyload/web/app/scripts/collections/PackageList.js b/pyload/web/app/scripts/collections/PackageList.js new file mode 100644 index 000000000..7bee861a4 --- /dev/null +++ b/pyload/web/app/scripts/collections/PackageList.js @@ -0,0 +1,16 @@ +define(['jquery', 'backbone', 'underscore', 'models/Package'], function($, Backbone, _, Package) { + 'use strict'; + + return Backbone.Collection.extend({ + + model: Package, + + comparator: function(pack) { + return pack.get('packageorder'); + }, + + initialize: function() { + } + + }); +}); \ No newline at end of file diff --git a/pyload/web/app/scripts/collections/ProgressList.js b/pyload/web/app/scripts/collections/ProgressList.js new file mode 100644 index 000000000..51849d8de --- /dev/null +++ b/pyload/web/app/scripts/collections/ProgressList.js @@ -0,0 +1,18 @@ +define(['jquery', 'backbone', 'underscore', 'models/Progress'], function($, Backbone, _, Progress) { + 'use strict'; + + return Backbone.Collection.extend({ + + model: Progress, + + comparator: function(progress) { + return progress.get('eta'); + }, + + initialize: function() { + + } + + }); + +}); \ No newline at end of file diff --git a/pyload/web/app/scripts/config.js b/pyload/web/app/scripts/config.js new file mode 100644 index 000000000..398d97e11 --- /dev/null +++ b/pyload/web/app/scripts/config.js @@ -0,0 +1,73 @@ +// Sets the require.js configuration for your application. 
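+// Non-AMD libraries (underscore, backbone, bootstrap, ...) get their exports and +// dependencies declared in the shim section below; templates are resolved through +// the hbs plugin and the 'tpl' path shortcut.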
+'use strict'; +require.config({ + + deps: ['default'], + + paths: { + + jquery: '../components/jquery/jquery', + flot: '../components/flot/jquery.flot', + transit: '../components/jquery.transit/jquery.transit', + animate: '../components/jquery.animate-enhanced/scripts/src/jquery.animate-enhanced', + cookie: '../components/jquery.cookie/jquery.cookie', + omniwindow: 'vendor/jquery.omniwindow', + select2: '../components/select2/select2', + bootstrap: 'vendor/bootstrap-2.3.2', + underscore: '../components/underscore/underscore', + backbone: '../components/backbone/backbone', + marionette: '../components/backbone.marionette/lib/backbone.marionette', +// handlebars: '../components/handlebars.js/dist/handlebars', + handlebars: 'vendor/Handlebars-1.0rc1', + jed: '../components/jed/jed', + + // TODO: Two hbs dependencies could be replaced + i18nprecompile: '../components/require-handlebars-plugin/hbs/i18nprecompile', + json2: '../components/require-handlebars-plugin/hbs/json2', + + // Plugins + text: '../components/requirejs-text/text', + hbs: '../components/require-handlebars-plugin/hbs', + + // Shortcut + tpl: '../templates/default' + }, + + hbs: { + disableI18n: true, + helperPathCallback: // Callback to determine the path to look for helpers + function(name) { + // Some helpers are accumulated into one file + if (name.indexOf('file') === 0) + name = 'fileHelper'; + + return 'helpers/' + name; + }, + templateExtension: 'html' + }, + + // Sets the configuration for your third party scripts that are not AMD compatible + shim: { + underscore: { + exports: '_' + }, + + backbone: { + deps: ['underscore', 'jquery'], + exports: 'Backbone' + }, + + marionette: ['backbone'], +// handlebars: { +// exports: 'Handlebars' +// }, + + flot: ['jquery'], + transit: ['jquery'], + cookie: ['jquery'], + omniwindow: ['jquery'], + select2: ['jquery'], + bootstrap: ['jquery'], + animate: ['jquery'] + } +}); \ No newline at end of file diff --git a/pyload/web/app/scripts/controller.js b/pyload/web/app/scripts/controller.js new file mode 100644 index 000000000..05237914d --- /dev/null +++ b/pyload/web/app/scripts/controller.js @@ -0,0 +1,67 @@ +define([ + 'app', + 'backbone', + + // Views + 'views/headerView', + 'views/notificationView', + 'views/dashboard/dashboardView', + 'views/dashboard/selectionView', + 'views/dashboard/filterView', + 'views/loginView', + 'views/settings/settingsView', + 'views/accounts/accountListView' +], function( + App, Backbone, HeaderView, NotificationView, DashboardView, SelectionView, FilterView, LoginView, SettingsView, AccountListView) { + 'use strict'; + // TODO some views does not need to be loaded instantly + + return { + + header: function() { + if (!App.header.currentView) { + App.header.show(new HeaderView()); + App.header.currentView.init(); + App.notification.attachView(new NotificationView()); + } + }, + + dashboard: function() { + this.header(); + + App.actionbar.show(new FilterView()); + // TODO: not completly visible after reattaching + App.selection.attachView(new SelectionView()); + App.content.show(new DashboardView()); + }, + + login: function() { + App.content.show(new LoginView()); + }, + + logout: function() { + alert('Not implemented'); + }, + + settings: function() { + this.header(); + + var view = new SettingsView(); + App.actionbar.show(new view.actionbar()); + App.content.show(view); + }, + + accounts: function() { + this.header(); + + var view = new AccountListView(); + App.actionbar.show(new view.actionbar()); + App.content.show(view); + }, + + admin: 
function() { + alert('Not implemented'); + } + }; + +}); diff --git a/pyload/web/app/scripts/default.js b/pyload/web/app/scripts/default.js new file mode 100644 index 000000000..a337cee21 --- /dev/null +++ b/pyload/web/app/scripts/default.js @@ -0,0 +1,30 @@ +define('default', ['backbone', 'jquery', 'app', 'router', 'models/userSession'], + function(Backbone, $, App, Router, UserSession) { + 'use strict'; + + // Global ajax options + var options = { + statusCode: { + 401: function() { + console.log('Not logged in.'); + App.navigate('login'); + } + }, + xhrFields: {withCredentials: true} + }; + + $.ajaxSetup(options); + + Backbone.ajax = function() { + Backbone.$.ajaxSetup.call(Backbone.$, options); + return Backbone.$.ajax.apply(Backbone.$, arguments); + }; + + $(function() { + App.session = new UserSession(); + App.router = new Router(); + App.start(); + }); + + return App; + }); \ No newline at end of file diff --git a/pyload/web/app/scripts/helpers/fileHelper.js b/pyload/web/app/scripts/helpers/fileHelper.js new file mode 100644 index 000000000..156be58f0 --- /dev/null +++ b/pyload/web/app/scripts/helpers/fileHelper.js @@ -0,0 +1,55 @@ +// Helpers to render the file view +define('helpers/fileHelper', ['handlebars', 'utils/apitypes', 'helpers/formatTime'], + function(Handlebars, Api, formatTime) { + 'use strict'; + + function fileClass(file, options) { + if (file.finished) + return 'finished'; + else if (file.failed) + return 'failed'; + else if (file.offline) + return 'offline'; + else if (file.online) + return 'online'; + else if (file.waiting) + return 'waiting'; + else if (file.downloading) + return 'downloading'; + + return ''; + } + + // TODO + function fileIcon(media, options) { + return 'icon-music'; + } + + // TODO rest of the states + function fileStatus(file, options) { + var s; + var msg = file.download.statusmsg; + + if (file.failed) { + s = ' '; + if (file.download.error) + s += file.download.error; + else s += msg; + } else if (file.finished) + s = ' ' + msg; + else if (file.downloading) + s = '
      ' + + formatTime(file.eta) + '
    '; + else if (file.waiting) + s = ' ' + formatTime(file.eta); + else + s = msg; + + return new Handlebars.SafeString(s); + } + + Handlebars.registerHelper('fileClass', fileClass); + Handlebars.registerHelper('fileIcon', fileIcon); + Handlebars.registerHelper('fileStatus', fileStatus); + return fileClass; + }); \ No newline at end of file diff --git a/pyload/web/app/scripts/helpers/formatSize.js b/pyload/web/app/scripts/helpers/formatSize.js new file mode 100644 index 000000000..3b62e74c7 --- /dev/null +++ b/pyload/web/app/scripts/helpers/formatSize.js @@ -0,0 +1,15 @@ +// Format bytes in human readable format +define('helpers/formatSize', ['handlebars'], function(Handlebars) { + 'use strict'; + + var sizes = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB']; + function formatSize(bytes, options) { + if (!bytes || bytes === 0) return '0 B'; + var i = parseInt(Math.floor(Math.log(bytes) / Math.log(1024)), 10); + // round to two digits + return (bytes / Math.pow(1024, i)).toFixed(2) + ' ' + sizes[i]; + } + + Handlebars.registerHelper('formatSize', formatSize); + return formatSize; +}); \ No newline at end of file diff --git a/pyload/web/app/scripts/helpers/formatTime.js b/pyload/web/app/scripts/helpers/formatTime.js new file mode 100644 index 000000000..757ff73ad --- /dev/null +++ b/pyload/web/app/scripts/helpers/formatTime.js @@ -0,0 +1,17 @@ +// Format bytes in human readable format +define('helpers/formatTime', ['handlebars', 'vendor/remaining'], function(Handlebars, Remaining) { + 'use strict'; + + function formatTime(seconds, options) { + if (seconds === Infinity) + return '∞'; + else if (!seconds || seconds <= 0) + return '-'; + + // TODO: digital or written string + return Remaining.getStringDigital(seconds, window.dates); + } + + Handlebars.registerHelper('formatTime', formatTime); + return formatTime; +}); \ No newline at end of file diff --git a/pyload/web/app/scripts/helpers/pluginIcon.js b/pyload/web/app/scripts/helpers/pluginIcon.js new file mode 100644 index 000000000..6b2fdc67f --- /dev/null +++ b/pyload/web/app/scripts/helpers/pluginIcon.js @@ -0,0 +1,14 @@ +// Resolves name of plugin to icon path +define('helpers/pluginIcon', ['handlebars', 'app'], function(Handlebars, App) { + 'use strict'; + + function pluginIcon(name) { + if (typeof name === 'object' && typeof name.get === 'function') + name = name.get('plugin'); + + return App.apiUrl('icons/' + name); + } + + Handlebars.registerHelper('pluginIcon', pluginIcon); + return pluginIcon; +}); \ No newline at end of file diff --git a/pyload/web/app/scripts/models/Account.js b/pyload/web/app/scripts/models/Account.js new file mode 100644 index 000000000..a2e24b056 --- /dev/null +++ b/pyload/web/app/scripts/models/Account.js @@ -0,0 +1,51 @@ +define(['jquery', 'backbone', 'underscore', 'app', 'utils/apitypes'], function($, Backbone, _, App, Api) { + 'use strict'; + + return Backbone.Model.extend({ + + // TODO + // generated, not submitted + idAttribute: 'user', + + defaults: { + plugin: null, + loginname: null, + owner: -1, + valid: false, + validuntil: -1, + trafficleft: -1, + maxtraffic: -1, + premium: false, + activated: false, + shared: false, + options: null + }, + + // Model Constructor + initialize: function() { + }, + + // Any time a model attribute is set, this method is called + validate: function(attrs) { + + }, + + save: function(options) { + options = App.apiRequest('updateAccountInfo', {account: this.toJSON()}, options); + return $.ajax(options); + }, + + destroy: function(options) { + options = 
App.apiRequest('removeAccount', {account: this.toJSON()}, options); + var self = this; + options.success = function() { + self.trigger('destroy', self, self.collection, options); + }; + + // TODO request is not dispatched +// return Backbone.Model.prototype.destroy.call(this, options); + return $.ajax(options); + } + }); + +}); \ No newline at end of file diff --git a/pyload/web/app/scripts/models/ConfigHolder.js b/pyload/web/app/scripts/models/ConfigHolder.js new file mode 100644 index 000000000..40efbc7c0 --- /dev/null +++ b/pyload/web/app/scripts/models/ConfigHolder.js @@ -0,0 +1,68 @@ +define(['jquery', 'backbone', 'underscore', 'app', './ConfigItem'], + function($, Backbone, _, App, ConfigItem) { + 'use strict'; + + return Backbone.Model.extend({ + + defaults: { + name: '', + label: '', + description: '', + long_description: null, + // simple list but no collection + items: null, + info: null + }, + + // Model Constructor + initialize: function() { + + }, + + // Loads it from server by name + fetch: function(options) { + options = App.apiRequest('loadConfig/"' + this.get('name') + '"', null, options); + return Backbone.Model.prototype.fetch.call(this, options); + }, + + save: function(options) { + var config = this.toJSON(); + var items = []; + // Convert changed items to json + _.each(config.items, function(item) { + if (item.isChanged()) { + items.push(item.prepareSave()); + } + }); + config.items = items; + // TODO: only set new values on success + + options = App.apiRequest('saveConfig', {config: config}, options); + + return $.ajax(options); + }, + + parse: function(resp) { + // Create item models + resp.items = _.map(resp.items, function(item) { + return new ConfigItem(item); + }); + + return Backbone.Model.prototype.parse.call(this, resp); + }, + + isLoaded: function() { + return this.has('items') || this.has('long_description'); + }, + + // check if any of the items has changes + hasChanges: function() { + var items = this.get('items'); + if (!items) return false; + return _.reduce(items, function(a, b) { + return a || b.isChanged(); + }, false); + } + + }); + }); \ No newline at end of file diff --git a/pyload/web/app/scripts/models/ConfigItem.js b/pyload/web/app/scripts/models/ConfigItem.js new file mode 100644 index 000000000..2d325c2a2 --- /dev/null +++ b/pyload/web/app/scripts/models/ConfigItem.js @@ -0,0 +1,40 @@ +define(['jquery', 'backbone', 'underscore', 'app', 'utils/apitypes'], + function($, Backbone, _, App, Api) { + 'use strict'; + + return Backbone.Model.extend({ + + defaults: { + name: '', + label: '', + description: '', + input: null, + default_value: null, + value: null, + // additional attributes + inputView: null + }, + + // Model Constructor + initialize: function() { + + }, + + isChanged: function() { + return this.get('inputView') && this.get('inputView').getVal() !== this.get('value'); + }, + + // set new value and return json + prepareSave: function() { + // set the new value + if (this.get('inputView')) + this.set('value', this.get('inputView').getVal()); + + var data = this.toJSON(); + delete data.inputView; + delete data.description; + + return data; + } + }); + }); \ No newline at end of file diff --git a/pyload/web/app/scripts/models/File.js b/pyload/web/app/scripts/models/File.js new file mode 100644 index 000000000..3beb7f270 --- /dev/null +++ b/pyload/web/app/scripts/models/File.js @@ -0,0 +1,92 @@ +define(['jquery', 'backbone', 'underscore', 'app', 'utils/apitypes'], function($, Backbone, _, App, Api) { + 'use strict'; + + var Finished = 
[Api.DownloadStatus.Finished, Api.DownloadStatus.Skipped]; + var Failed = [Api.DownloadStatus.Failed, Api.DownloadStatus.Aborted, Api.DownloadStatus.TempOffline, Api.DownloadStatus.Offline]; + // Unfinished - Other + + return Backbone.Model.extend({ + + idAttribute: 'fid', + + defaults: { + fid: -1, + name: null, + package: -1, + owner: -1, + size: -1, + status: -1, + media: -1, + added: -1, + fileorder: -1, + download: null, + + // UI attributes + selected: false, + visible: true, + progress: 0, + eta: 0 + }, + + // Model Constructor + initialize: function() { + + }, + + fetch: function(options) { + options = App.apiRequest( + 'getFileInfo', + {fid: this.get('fid')}, + options); + + return Backbone.Model.prototype.fetch.call(this, options); + }, + + destroy: function(options) { + // also not working when using data + options = App.apiRequest( + 'deleteFiles/[' + this.get('fid') + ']', + null, options); + options.method = 'post'; + + return Backbone.Model.prototype.destroy.call(this, options); + }, + + // Does not send a request to the server + destroyLocal: function(options) { + this.trigger('destroy', this, this.collection, options); + }, + + restart: function(options) { + options = App.apiRequest( + 'restartFile', + {fid: this.get('fid')}, + options); + + return $.ajax(options); + }, + + // Any time a model attribute is set, this method is called + validate: function(attrs) { + + }, + + isDownload: function() { + return this.has('download'); + }, + + isFinished: function() { + return _.indexOf(Finished, this.get('download').status) > -1; + }, + + isUnfinished: function() { + return _.indexOf(Finished, this.get('download').status) === -1 && _.indexOf(Failed, this.get('download').status) === -1; + }, + + isFailed: function() { + return _.indexOf(Failed, this.get('download').status) > -1; + } + + }); + +}); \ No newline at end of file diff --git a/pyload/web/app/scripts/models/InteractionTask.js b/pyload/web/app/scripts/models/InteractionTask.js new file mode 100644 index 000000000..54c739d4b --- /dev/null +++ b/pyload/web/app/scripts/models/InteractionTask.js @@ -0,0 +1,41 @@ +define(['jquery', 'backbone', 'underscore', 'app', 'utils/apitypes'], + function($, Backbone, _, App, Api) { + 'use strict'; + + return Backbone.Model.extend({ + + idAttribute: 'iid', + + defaults: { + iid: -1, + type: null, + input: null, + default_value: null, + title: '', + description: '', + plugin: '', + // additional attributes + result: '' + }, + + // Model Constructor + initialize: function() { + + }, + + save: function(options) { + options = App.apiRequest('setInteractionResult/' + this.get('iid'), + {result: this.get('result')}, options); + + return $.ajax(options); + }, + + isNotification: function() { + return this.get('type') === Api.Interaction.Notification; + }, + + isCaptcha: function() { + return this.get('type') === Api.Interaction.Captcha; + } + }); + }); \ No newline at end of file diff --git a/pyload/web/app/scripts/models/Package.js b/pyload/web/app/scripts/models/Package.js new file mode 100644 index 000000000..a34ec1c69 --- /dev/null +++ b/pyload/web/app/scripts/models/Package.js @@ -0,0 +1,119 @@ +define(['jquery', 'backbone', 'underscore', 'app', 'collections/FileList', 'require'], + function($, Backbone, _, App, FileList, require) { + 'use strict'; + + return Backbone.Model.extend({ + + idAttribute: 'pid', + + defaults: { + pid: -1, + name: null, + folder: '', + root: -1, + owner: -1, + site: '', + comment: '', + password: '', + added: -1, + tags: null, + status: -1, + shared: false, + 
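// ordering and stats, plus the files/packs collections that parse() attaches lazily +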
packageorder: -1, + stats: null, + fids: null, + pids: null, + files: null, // Collection + packs: null, // Collection + + selected: false // For Checkbox + }, + + // Model Constructor + initialize: function() { + }, + + toJSON: function(options) { + var obj = Backbone.Model.prototype.toJSON.call(this, options); + obj.percent = Math.round(obj.stats.linksdone * 100 / obj.stats.linkstotal); + + return obj; + }, + + // Changes url + method and delegates call to super class + fetch: function(options) { + options = App.apiRequest( + 'getFileTree/' + this.get('pid'), + {full: false}, + options); + + return Backbone.Model.prototype.fetch.call(this, options); + }, + + // Create a pseudo package and use search to populate data + search: function(qry, options) { + options = App.apiRequest( + 'findFiles', + {pattern: qry}, + options); + + return Backbone.Model.prototype.fetch.call(this, options); + }, + + save: function(options) { + // TODO + }, + + destroy: function(options) { + // TODO: Not working when using data; the array seems to break it + options = App.apiRequest( + 'deletePackages/[' + this.get('pid') + ']', + null, options); + options.method = 'post'; + + console.log(options); + + return Backbone.Model.prototype.destroy.call(this, options); + }, + + restart: function(options) { + options = App.apiRequest( + 'restartPackage', + {pid: this.get('pid')}, + options); + + var self = this; + options.success = function() { + self.fetch(); + }; + return $.ajax(options); + }, + + parse: function(resp) { + // Package is loaded from tree collection + if (_.has(resp, 'root')) { + if (!this.has('files')) + resp.root.files = new FileList(_.values(resp.files)); + else + this.get('files').set(_.values(resp.files)); + + // circular dependencies need to be avoided + var PackageList = require('collections/PackageList'); + + if (!this.has('packs')) + resp.root.packs = new PackageList(_.values(resp.packages)); + else + this.get('packs').set(_.values(resp.packages)); + + return resp.root; + } + return Backbone.Model.prototype.parse.call(this, resp); + }, + + // Any time a model attribute is set, this method is called + validate: function(attrs) { + + } + + }); + }); \ No newline at end of file diff --git a/pyload/web/app/scripts/models/Progress.js b/pyload/web/app/scripts/models/Progress.js new file mode 100644 index 000000000..b0bbb684d --- /dev/null +++ b/pyload/web/app/scripts/models/Progress.js @@ -0,0 +1,50 @@ +define(['jquery', 'backbone', 'underscore', 'utils/apitypes'], function($, Backbone, _, Api) { + 'use strict'; + + return Backbone.Model.extend({ + + // generated, not submitted + idAttribute: 'pid', + + defaults: { + pid: -1, + plugin: null, + name: null, + statusmsg: -1, + eta: -1, + done: -1, + total: -1, + download: null + }, + + getPercent: function() { + if (this.get('total') > 0) + return Math.round(this.get('done') * 100 / this.get('total')); + return 0; + }, + + // Model Constructor + initialize: function() { + + }, + + // Any time a model attribute is set, this method is called + validate: function(attrs) { + + }, + + toJSON: function(options) { + var obj = Backbone.Model.prototype.toJSON.call(this, options); + obj.percent = this.getPercent(); + obj.downloading = this.isDownload() && this.get('download').status === Api.DownloadStatus.Downloading; + + return obj; + }, + + isDownload : function() { + return this.has('download'); + } + + }); + +}); \ No newline at end of file diff --git a/pyload/web/app/scripts/models/ServerStatus.js b/pyload/web/app/scripts/models/ServerStatus.js new file mode
100644 index 000000000..59739b41e --- /dev/null +++ b/pyload/web/app/scripts/models/ServerStatus.js @@ -0,0 +1,47 @@ +define(['jquery', 'backbone', 'underscore'], + function($, Backbone, _) { + 'use strict'; + + return Backbone.Model.extend({ + + defaults: { + speed: 0, + linkstotal: 0, + linksqueue: 0, + sizetotal: 0, + sizequeue: 0, + notifications: -1, + paused: false, + download: false, + reconnect: false + }, + + // Model Constructor + initialize: function() { + + }, + + fetch: function(options) { + options || (options = {}); + options.url = 'api/getServerStatus'; + + return Backbone.Model.prototype.fetch.call(this, options); + }, + + toJSON: function(options) { + var obj = Backbone.Model.prototype.toJSON.call(this, options); + + obj.linksdone = obj.linkstotal - obj.linksqueue; + obj.sizedone = obj.sizetotal - obj.sizequeue; + if (obj.speed && obj.speed > 0) + obj.eta = Math.round(obj.sizequeue / obj.speed); + else if (obj.sizequeue > 0) + obj.eta = Infinity; + else + obj.eta = 0; + + return obj; + } + + }); + }); \ No newline at end of file diff --git a/pyload/web/app/scripts/models/TreeCollection.js b/pyload/web/app/scripts/models/TreeCollection.js new file mode 100644 index 000000000..2f761e6cc --- /dev/null +++ b/pyload/web/app/scripts/models/TreeCollection.js @@ -0,0 +1,50 @@ +define(['jquery', 'backbone', 'underscore', 'app', 'models/Package', 'collections/FileList', 'collections/PackageList'], + function($, Backbone, _, App, Package, FileList, PackageList) { + 'use strict'; + + // TreeCollection + // A Model and not a collection, aggregates other collections + return Backbone.Model.extend({ + + defaults: { + root: null, + packages: null, + files: null + }, + + initialize: function() { + + }, + + fetch: function(options) { + options || (options = {}); + var pid = options.pid || -1; + + options = App.apiRequest( + 'getFileTree/' + pid, + {full: false}, + options); + + console.log('Fetching package tree ' + pid); + return Backbone.Model.prototype.fetch.call(this, options); + }, + + // Parse the response and updates the collections + parse: function(resp) { + var ret = {}; + if (!this.has('packages')) + ret.packages = new PackageList(_.values(resp.packages)); + else + this.get('packages').set(_.values(resp.packages)); + + if (!this.has('files')) + ret.files = new FileList(_.values(resp.files)); + else + this.get('files').set(_.values(resp.files)); + + ret.root = new Package(resp.root); + return ret; + } + + }); + }); \ No newline at end of file diff --git a/pyload/web/app/scripts/models/UserSession.js b/pyload/web/app/scripts/models/UserSession.js new file mode 100644 index 000000000..a7e9aa848 --- /dev/null +++ b/pyload/web/app/scripts/models/UserSession.js @@ -0,0 +1,20 @@ +define(['jquery', 'backbone', 'underscore', 'utils/apitypes', 'cookie'], + function($, Backbone, _, Api) { + 'use strict'; + + return Backbone.Model.extend({ + + idAttribute: 'username', + + defaults: { + username: null, + permissions: null, + session: null + }, + + // Model Constructor + initialize: function() { + this.set('session', $.cookie('beaker.session.id')); + } + }); + }); \ No newline at end of file diff --git a/pyload/web/app/scripts/router.js b/pyload/web/app/scripts/router.js new file mode 100644 index 000000000..68ea5575d --- /dev/null +++ b/pyload/web/app/scripts/router.js @@ -0,0 +1,29 @@ +/** + * Router defines routes that are handled by registered controller + */ +define([ + // Libraries + 'backbone', + 'marionette', + + // Modules + 'controller' +], + function(Backbone, Marionette, 
Controller) { + 'use strict'; + + return Backbone.Marionette.AppRouter.extend({ + + appRoutes: { + '': 'dashboard', + 'login': 'login', + 'logout': 'logout', + 'settings': 'settings', + 'accounts': 'accounts', + 'admin': 'admin' + }, + + // Our controller to handle the routes + controller: Controller + }); + }); diff --git a/pyload/web/app/scripts/routers/defaultRouter.js b/pyload/web/app/scripts/routers/defaultRouter.js new file mode 100644 index 000000000..4b00d160c --- /dev/null +++ b/pyload/web/app/scripts/routers/defaultRouter.js @@ -0,0 +1,30 @@ +define(['jquery', 'backbone', 'views/headerView'], function($, Backbone, HeaderView) { + 'use strict'; + + var Router = Backbone.Router.extend({ + + initialize: function() { + Backbone.history.start(); + }, + + // All of your Backbone Routes (add more) + routes: { + + // When there is no hash bang on the url, the home method is called + '': 'home' + + }, + + 'home': function() { + // Instantiating mainView and anotherView instances + var headerView = new HeaderView(); + + // Renders the mainView template + headerView.render(); + + } + }); + + // Returns the Router class + return Router; +}); \ No newline at end of file diff --git a/pyload/web/app/scripts/routers/mobileRouter.js b/pyload/web/app/scripts/routers/mobileRouter.js new file mode 100644 index 000000000..e24cb7a34 --- /dev/null +++ b/pyload/web/app/scripts/routers/mobileRouter.js @@ -0,0 +1,56 @@ +define(['jquery', 'backbone', 'underscore'], function($, Backbone, _) { + 'use strict'; + + return Backbone.Router.extend({ + + initialize: function() { + _.bindAll(this, 'changePage'); + + this.$el = $('#content'); + + // Tells Backbone to start watching for hashchange events + Backbone.history.start(); + + }, + + // All of your Backbone Routes (add more) + routes: { + + // When there is no hash bang on the url, the home method is called + '': 'home' + + }, + + 'home': function() { + + var self = this; + + $('#p1').fastClick(function() { + self.changePage($('
<div class="page">Page 1<br>some content<br>sdfdsf<br>sdffg<br>oiuzz</div>')); + }); + + $('#p2').bind('click', function() { + self.changePage($('<div class="page">Page 2<br>some content<br>sdfdsf<br>sdfsdf<br>sdffg</div>
    ')); + }); + + }, + + changePage: function(content) { + + var oldpage = this.$el.find('.page'); + content.css({x: '100%'}); + this.$el.append(content); + content.transition({x: 0}, function() { + window.setTimeout(function() { + oldpage.remove(); + }, 400); + }); + +// $("#viewport").transition({x: "100%"}, function(){ +// $("#viewport").html(content); +// $("#viewport").transition({x: 0}); +// }); + } + + }); +}); \ No newline at end of file diff --git a/pyload/web/app/scripts/utils/animations.js b/pyload/web/app/scripts/utils/animations.js new file mode 100644 index 000000000..7f89afef1 --- /dev/null +++ b/pyload/web/app/scripts/utils/animations.js @@ -0,0 +1,129 @@ +define(['jquery', 'underscore', 'transit'], function(jQuery, _) { + 'use strict'; + + // Adds an element and computes its height, which is saved as data attribute + // Important function to have slide animations + jQuery.fn.appendWithHeight = function(element, hide) { + var o = jQuery(this[0]); + element = jQuery(element); + + // TODO: additionally it could be placed out of viewport first + // The real height can only be retrieved when element is on DOM and display:true + element.css('visibility', 'hidden'); + o.append(element); + + var height = element.height(); + + // Hide the element + if (hide === true) { + element.hide(); + element.height(0); + } + + element.css('visibility', ''); + element.data('height', height); + + return this; + }; + + // Shortcut to have a animation when element is added + jQuery.fn.appendWithAnimation = function(element, animation) { + var o = jQuery(this[0]); + element = jQuery(element); + + if (animation === true) + element.hide(); + + o.append(element); + + if (animation === true) + element.fadeIn(); + +// element.calculateHeight(); + + return this; + }; + + // calculate the height and write it to data, should be used on invisible elements + jQuery.fn.calculateHeight = function(setHeight) { + var o = jQuery(this[0]); + var height = o.height(); + if (!height) { + var display = o.css('display'); + o.css('visibility', 'hidden'); + o.show(); + height = o.height(); + + o.css('display', display); + o.css('visibility', ''); + } + + if (setHeight) + o.css('height', height); + + o.data('height', height); + return this; + }; + + // TODO: carry arguments, optional height argument + + // reset arguments, sets overflow hidden + jQuery.fn.slideOut = function(reset) { + var o = jQuery(this[0]); + o.animate({height: o.data('height'), opacity: 'show'}, function() { + // reset css attributes; + if (reset) { + this.css('overflow', ''); + this.css('height', ''); + } + }); + return this; + }; + + jQuery.fn.slideIn = function(reset) { + var o = jQuery(this[0]); + if (reset) { + o.css('overflow', 'hidden'); + } + o.animate({height: 0, opacity: 'hide'}); + return this; + }; + + jQuery.fn.initTooltips = function(placement) { + placement || (placement = 'top'); + + var o = jQuery(this[0]); + o.find('[data-toggle="tooltip"]').tooltip( + { + delay: {show: 800, hide: 100}, + placement: placement + }); + + return this; + }; + + jQuery.fn._transit = jQuery.fn.transit; + + // Overriding transit plugin to support hide and show + jQuery.fn.transit = jQuery.fn.transition = function(props, duration, easing, callback) { + var self = this; + var cb = callback; + var newprops = _.extend({}, props); + + if (newprops && (newprops.opacity === 'hide')) { + newprops.opacity = 0; + + callback = function() { + self.css({display: 'none'}); + if (typeof cb === 'function') { + cb.apply(self); + } + }; + } else if (newprops && 
(newprops.opacity === 'show')) { + newprops.opacity = 1; + this.css({display: 'block'}); + } + + return this._transit(newprops, duration, easing, callback); + }; +}); \ No newline at end of file diff --git a/pyload/web/app/scripts/utils/apitypes.js b/pyload/web/app/scripts/utils/apitypes.js new file mode 100644 index 000000000..cbbc9064f --- /dev/null +++ b/pyload/web/app/scripts/utils/apitypes.js @@ -0,0 +1,16 @@ +// Autogenerated, do not edit! +/*jslint -W070: false*/ +define([], function() { + 'use strict'; + return { + DownloadState: {'Failed': 3, 'All': 0, 'Unmanaged': 4, 'Finished': 1, 'Unfinished': 2}, + DownloadStatus: {'Downloading': 10, 'NA': 0, 'Processing': 14, 'Waiting': 9, 'Decrypting': 13, 'Paused': 4, 'Failed': 7, 'Finished': 5, 'Skipped': 6, 'Unknown': 16, 'Aborted': 12, 'Online': 2, 'TempOffline': 11, 'Offline': 1, 'Custom': 15, 'Starting': 8, 'Queued': 3}, + FileStatus: {'Remote': 2, 'Ok': 0, 'Missing': 1}, + InputType: {'Multiple': 10, 'Int': 2, 'NA': 0, 'List': 11, 'Bool': 7, 'File': 3, 'Text': 1, 'Table': 12, 'Folder': 4, 'Password': 6, 'Click': 8, 'Select': 9, 'Textbox': 5}, + Interaction: {'Captcha': 2, 'All': 0, 'Query': 4, 'Notification': 1}, + MediaType: {'All': 0, 'Audio': 2, 'Image': 4, 'Other': 1, 'Video': 8, 'Document': 16, 'Archive': 32}, + PackageStatus: {'Paused': 1, 'Remote': 3, 'Folder': 2, 'Ok': 0}, + Permission: {'All': 0, 'Interaction': 32, 'Modify': 4, 'Add': 1, 'Accounts': 16, 'Plugins': 64, 'Download': 8, 'Delete': 2}, + Role: {'Admin': 0, 'User': 1}, + }; +}); \ No newline at end of file diff --git a/pyload/web/app/scripts/utils/dialogs.js b/pyload/web/app/scripts/utils/dialogs.js new file mode 100644 index 000000000..4933b7ed2 --- /dev/null +++ b/pyload/web/app/scripts/utils/dialogs.js @@ -0,0 +1,16 @@ +// Loads all helper and set own handlebars rules +define(['jquery', 'underscore', 'views/abstract/modalView'], function($, _, Modal) { + 'use strict'; + + // Shows the confirm dialog for given context + // on success executes func with context + _.confirm = function(template, func, context) { + template = 'text!tpl/' + template; + _.requireOnce([template], function(html) { + var template = _.compile(html); + var dialog = new Modal(template, _.bind(func, context)); + dialog.show(); + }); + + }; +}); \ No newline at end of file diff --git a/pyload/web/app/scripts/utils/initHB.js b/pyload/web/app/scripts/utils/initHB.js new file mode 100644 index 000000000..d7f582521 --- /dev/null +++ b/pyload/web/app/scripts/utils/initHB.js @@ -0,0 +1,11 @@ +// Loads all helper and set own handlebars rules +define(['underscore', 'handlebars', + 'helpers/formatSize', 'helpers/fileHelper', 'helpers/formatTime'], + function(_, Handlebars) { + 'use strict'; + // Replace with own lexer rules compiled from handlebars.l + Handlebars.Parser.lexer.rules = [/^(?:[^\x00]*?(?=(<%)))/, /^(?:[^\x00]+)/, /^(?:[^\x00]{2,}?(?=(\{\{|$)))/, /^(?:\{\{>)/, /^(?:<%=)/, /^(?:<%\/)/, /^(?:\{\{\^)/, /^(?:<%\s*else\b)/, /^(?:\{<%%)/, /^(?:\{\{&)/, /^(?:<%![\s\S]*?%>)/, /^(?:<%)/, /^(?:=)/, /^(?:\.(?=[%} ]))/, /^(?:\.\.)/, /^(?:[\/.])/, /^(?:\s+)/, /^(?:%%>)/, /^(?:%>)/, /^(?:"(\\["]|[^"])*")/, /^(?:'(\\[']|[^'])*')/, /^(?:@[a-zA-Z]+)/, /^(?:true(?=[%}\s]))/, /^(?:false(?=[%}\s]))/, /^(?:[0-9]+(?=[%}\s]))/, /^(?:[a-zA-Z0-9_$-]+(?=[=%}\s\/.]))/, /^(?:\[[^\]]*\])/, /^(?:.)/, /^(?:$)/]; + _.compile = Handlebars.compile; + + return Handlebars; + }); \ No newline at end of file diff --git a/pyload/web/app/scripts/utils/lazyRequire.js b/pyload/web/app/scripts/utils/lazyRequire.js new file mode 
100644 index 000000000..96c07aa24 --- /dev/null +++ b/pyload/web/app/scripts/utils/lazyRequire.js @@ -0,0 +1,97 @@ +// Define the module. +define( + [ + 'require', 'underscore' + ], + function( require, _ ){ + 'use strict'; + + + // Define the states of loading for a given set of modules + // within a require() statement. + var states = { + unloaded: 'UNLOADED', + loading: 'LOADING', + loaded: 'LOADED' + }; + + + // Define the top-level module container. Mostly, we're making + // the top-level container a non-Function so that users won't + // try to invoke this without calling the once() method below. + var lazyRequire = {}; + + + // I will return a new, unique instance of the requrieOnce() + // method. Each instance will only call the require() method + // once internally. + lazyRequire.once = function(){ + + // The modules start in an unloaded state before + // requireOnce() is invoked by the calling code. + var state = states.unloaded; + var args; + + var requireOnce = function(dependencies, loadCallback ){ + + // Use the module state to determine which method to + // invoke (or just to ignore the invocation). + if (state === states.loaded){ + loadCallback.apply(null, args); + + // The modules have not yet been requested - let's + // lazy load them. + } else if (state !== states.loading){ + + // We're about to load the modules asynchronously; + // flag the interim state. + state = states.loading; + + // Load the modules. + require( + dependencies, + function(){ + + args = arguments; + loadCallback.apply( null, args ); + state = states.loaded; + + + } + ); + + // RequireJS is currently loading the modules + // asynchronously, but they have not finished + // loading yet. + } else { + + // Simply ignore this call. + return; + + } + + }; + + // Return the new lazy loader. + return( requireOnce ); + + }; + + + // -------------------------------------------------- // + // -------------------------------------------------- // + + // Set up holder for underscore + var instances = {}; + _.requireOnce = function(dependencies, loadCallback) { + if (!_.has(instances, dependencies)) + instances[dependencies] = lazyRequire.once(); + + return instances[dependencies](dependencies, loadCallback); + }; + + + // Return the module definition. 
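+ // (Note: each distinct dependency list gets exactly one require() call; once it has + // loaded, later requireOnce() invocations replay the cached module arguments + // synchronously, and invocations arriving while loading is still in flight are dropped.)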
+ return( lazyRequire ); + } +); \ No newline at end of file diff --git a/pyload/web/app/scripts/vendor/Handlebars-1.0rc1.js b/pyload/web/app/scripts/vendor/Handlebars-1.0rc1.js new file mode 100644 index 000000000..991242461 --- /dev/null +++ b/pyload/web/app/scripts/vendor/Handlebars-1.0rc1.js @@ -0,0 +1,1927 @@ +// lib/handlebars/base.js +(function () { +/*jshint eqnull:true*/ +this.Handlebars = {}; + +(function(Handlebars) { + +Handlebars.VERSION = "1.0.rc.1"; + +Handlebars.helpers = {}; +Handlebars.partials = {}; + +Handlebars.registerHelper = function(name, fn, inverse) { + if(inverse) { fn.not = inverse; } + this.helpers[name] = fn; +}; + +Handlebars.registerPartial = function(name, str) { + this.partials[name] = str; +}; + +Handlebars.registerHelper('helperMissing', function(arg) { + if(arguments.length === 2) { + return undefined; + } else { + throw new Error("Could not find property '" + arg + "'"); + } +}); + +var toString = Object.prototype.toString, functionType = "[object Function]"; + +Handlebars.registerHelper('blockHelperMissing', function(context, options) { + var inverse = options.inverse || function() {}, fn = options.fn; + + + var ret = ""; + var type = toString.call(context); + + if(type === functionType) { context = context.call(this); } + + if(context === true) { + return fn(this); + } else if(context === false || context == null) { + return inverse(this); + } else if(type === "[object Array]") { + if(context.length > 0) { + return Handlebars.helpers.each(context, options); + } else { + return inverse(this); + } + } else { + return fn(context); + } +}); + +Handlebars.K = function() {}; + +Handlebars.createFrame = Object.create || function(object) { + Handlebars.K.prototype = object; + var obj = new Handlebars.K(); + Handlebars.K.prototype = null; + return obj; +}; + +Handlebars.registerHelper('each', function(context, options) { + var fn = options.fn, inverse = options.inverse; + var ret = "", data; + + if (options.data) { + data = Handlebars.createFrame(options.data); + } + + if(context && context.length > 0) { + for(var i=0, j=context.length; i 2) { + expected.push("'" + this.terminals_[p] + "'"); + } + if (this.lexer.showPosition) { + errStr = "Parse error on line " + (yylineno + 1) + ":\n" + this.lexer.showPosition() + "\nExpecting " + expected.join(", ") + ", got '" + (this.terminals_[symbol] || symbol) + "'"; + } else { + errStr = "Parse error on line " + (yylineno + 1) + ": Unexpected " + (symbol == 1?"end of input":"'" + (this.terminals_[symbol] || symbol) + "'"); + } + this.parseError(errStr, {text: this.lexer.match, token: this.terminals_[symbol] || symbol, line: this.lexer.yylineno, loc: yyloc, expected: expected}); + } + } + if (action[0] instanceof Array && action.length > 1) { + throw new Error("Parse Error: multiple actions possible at state: " + state + ", token: " + symbol); + } + switch (action[0]) { + case 1: + stack.push(symbol); + vstack.push(this.lexer.yytext); + lstack.push(this.lexer.yylloc); + stack.push(action[1]); + symbol = null; + if (!preErrorSymbol) { + yyleng = this.lexer.yyleng; + yytext = this.lexer.yytext; + yylineno = this.lexer.yylineno; + yyloc = this.lexer.yylloc; + if (recovering > 0) + recovering--; + } else { + symbol = preErrorSymbol; + preErrorSymbol = null; + } + break; + case 2: + len = this.productions_[action[1]][1]; + yyval.$ = vstack[vstack.length - len]; + yyval._$ = {first_line: lstack[lstack.length - (len || 1)].first_line, last_line: lstack[lstack.length - 1].last_line, first_column: lstack[lstack.length - (len 
|| 1)].first_column, last_column: lstack[lstack.length - 1].last_column}; + if (ranges) { + yyval._$.range = [lstack[lstack.length - (len || 1)].range[0], lstack[lstack.length - 1].range[1]]; + } + r = this.performAction.call(yyval, yytext, yyleng, yylineno, this.yy, action[1], vstack, lstack); + if (typeof r !== "undefined") { + return r; + } + if (len) { + stack = stack.slice(0, -1 * len * 2); + vstack = vstack.slice(0, -1 * len); + lstack = lstack.slice(0, -1 * len); + } + stack.push(this.productions_[action[1]][0]); + vstack.push(yyval.$); + lstack.push(yyval._$); + newState = table[stack[stack.length - 2]][stack[stack.length - 1]]; + stack.push(newState); + break; + case 3: + return true; + } + } + return true; +} +}; +/* Jison generated lexer */ +var lexer = (function(){ +var lexer = ({EOF:1, +parseError:function parseError(str, hash) { + if (this.yy.parser) { + this.yy.parser.parseError(str, hash); + } else { + throw new Error(str); + } + }, +setInput:function (input) { + this._input = input; + this._more = this._less = this.done = false; + this.yylineno = this.yyleng = 0; + this.yytext = this.matched = this.match = ''; + this.conditionStack = ['INITIAL']; + this.yylloc = {first_line:1,first_column:0,last_line:1,last_column:0}; + if (this.options.ranges) this.yylloc.range = [0,0]; + this.offset = 0; + return this; + }, +input:function () { + var ch = this._input[0]; + this.yytext += ch; + this.yyleng++; + this.offset++; + this.match += ch; + this.matched += ch; + var lines = ch.match(/(?:\r\n?|\n).*/g); + if (lines) { + this.yylineno++; + this.yylloc.last_line++; + } else { + this.yylloc.last_column++; + } + if (this.options.ranges) this.yylloc.range[1]++; + + this._input = this._input.slice(1); + return ch; + }, +unput:function (ch) { + var len = ch.length; + var lines = ch.split(/(?:\r\n?|\n)/g); + + this._input = ch + this._input; + this.yytext = this.yytext.substr(0, this.yytext.length-len-1); + //this.yyleng -= len; + this.offset -= len; + var oldLines = this.match.split(/(?:\r\n?|\n)/g); + this.match = this.match.substr(0, this.match.length-1); + this.matched = this.matched.substr(0, this.matched.length-1); + + if (lines.length-1) this.yylineno -= lines.length-1; + var r = this.yylloc.range; + + this.yylloc = {first_line: this.yylloc.first_line, + last_line: this.yylineno+1, + first_column: this.yylloc.first_column, + last_column: lines ? + (lines.length === oldLines.length ? this.yylloc.first_column : 0) + oldLines[oldLines.length - lines.length].length - lines[0].length: + this.yylloc.first_column - len + }; + + if (this.options.ranges) { + this.yylloc.range = [r[0], r[0] + this.yyleng - len]; + } + return this; + }, +more:function () { + this._more = true; + return this; + }, +less:function (n) { + this.unput(this.match.slice(n)); + }, +pastInput:function () { + var past = this.matched.substr(0, this.matched.length - this.match.length); + return (past.length > 20 ? '...':'') + past.substr(-20).replace(/\n/g, ""); + }, +upcomingInput:function () { + var next = this.match; + if (next.length < 20) { + next += this._input.substr(0, 20-next.length); + } + return (next.substr(0,20)+(next.length > 20 ? 
'...':'')).replace(/\n/g, ""); + }, +showPosition:function () { + var pre = this.pastInput(); + var c = new Array(pre.length + 1).join("-"); + return pre + this.upcomingInput() + "\n" + c+"^"; + }, +next:function () { + if (this.done) { + return this.EOF; + } + if (!this._input) this.done = true; + + var token, + match, + tempMatch, + index, + col, + lines; + if (!this._more) { + this.yytext = ''; + this.match = ''; + } + var rules = this._currentRules(); + for (var i=0;i < rules.length; i++) { + tempMatch = this._input.match(this.rules[rules[i]]); + if (tempMatch && (!match || tempMatch[0].length > match[0].length)) { + match = tempMatch; + index = i; + if (!this.options.flex) break; + } + } + if (match) { + lines = match[0].match(/(?:\r\n?|\n).*/g); + if (lines) this.yylineno += lines.length; + this.yylloc = {first_line: this.yylloc.last_line, + last_line: this.yylineno+1, + first_column: this.yylloc.last_column, + last_column: lines ? lines[lines.length-1].length-lines[lines.length-1].match(/\r?\n?/)[0].length : this.yylloc.last_column + match[0].length}; + this.yytext += match[0]; + this.match += match[0]; + this.matches = match; + this.yyleng = this.yytext.length; + if (this.options.ranges) { + this.yylloc.range = [this.offset, this.offset += this.yyleng]; + } + this._more = false; + this._input = this._input.slice(match[0].length); + this.matched += match[0]; + token = this.performAction.call(this, this.yy, this, rules[index],this.conditionStack[this.conditionStack.length-1]); + if (this.done && this._input) this.done = false; + if (token) return token; + else return; + } + if (this._input === "") { + return this.EOF; + } else { + return this.parseError('Lexical error on line '+(this.yylineno+1)+'. Unrecognized text.\n'+this.showPosition(), + {text: "", token: null, line: this.yylineno}); + } + }, +lex:function lex() { + var r = this.next(); + if (typeof r !== 'undefined') { + return r; + } else { + return this.lex(); + } + }, +begin:function begin(condition) { + this.conditionStack.push(condition); + }, +popState:function popState() { + return this.conditionStack.pop(); + }, +_currentRules:function _currentRules() { + return this.conditions[this.conditionStack[this.conditionStack.length-1]].rules; + }, +topState:function () { + return this.conditionStack[this.conditionStack.length-2]; + }, +pushState:function begin(condition) { + this.begin(condition); + }}); +lexer.options = {}; +lexer.performAction = function anonymous(yy,yy_,$avoiding_name_collisions,YY_START) { + +var YYSTATE=YY_START +switch($avoiding_name_collisions) { +case 0: + if(yy_.yytext.slice(-1) !== "\\") this.begin("mu"); + if(yy_.yytext.slice(-1) === "\\") yy_.yytext = yy_.yytext.substr(0,yy_.yyleng-1), this.begin("emu"); + if(yy_.yytext) return 14; + +break; +case 1: return 14; +break; +case 2: + if(yy_.yytext.slice(-1) !== "\\") this.popState(); + if(yy_.yytext.slice(-1) === "\\") yy_.yytext = yy_.yytext.substr(0,yy_.yyleng-1); + return 14; + +break; +case 3: return 24; +break; +case 4: return 16; +break; +case 5: return 20; +break; +case 6: return 19; +break; +case 7: return 19; +break; +case 8: return 23; +break; +case 9: return 23; +break; +case 10: yy_.yytext = yy_.yytext.substr(3,yy_.yyleng-5); this.popState(); return 15; +break; +case 11: return 22; +break; +case 12: return 35; +break; +case 13: return 34; +break; +case 14: return 34; +break; +case 15: return 37; +break; +case 16: /*ignore whitespace*/ +break; +case 17: this.popState(); return 18; +break; +case 18: this.popState(); return 18; +break; +case 
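// Sketch of the rule scan inside next() above: by default the first matching
// rule wins (declaration order sets priority), while options.flex keeps
// scanning and takes the longest match, emulating flex(1) semantics.
function scan(input, rules, flex) {
  var match = null, index = -1;
  for (var i = 0; i < rules.length; i++) {
    var m = input.match(rules[i]);
    if (m && (!match || m[0].length > match[0].length)) {
      match = m;
      index = i;
      if (!flex) break;               // first match wins unless flex is set
    }
  }
  return match && { text: match[0], rule: index };
}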
19: yy_.yytext = yy_.yytext.substr(1,yy_.yyleng-2).replace(/\\"/g,'"'); return 29; +break; +case 20: yy_.yytext = yy_.yytext.substr(1,yy_.yyleng-2).replace(/\\"/g,'"'); return 29; +break; +case 21: yy_.yytext = yy_.yytext.substr(1); return 27; +break; +case 22: return 31; +break; +case 23: return 31; +break; +case 24: return 30; +break; +case 25: return 34; +break; +case 26: yy_.yytext = yy_.yytext.substr(1, yy_.yyleng-2); return 34; +break; +case 27: return 'INVALID'; +break; +case 28: return 5; +break; +} +}; +lexer.rules = [/^(?:[^\x00]*?(?=(\{\{)))/,/^(?:[^\x00]+)/,/^(?:[^\x00]{2,}?(?=(\{\{|$)))/,/^(?:\{\{>)/,/^(?:\{\{#)/,/^(?:\{\{\/)/,/^(?:\{\{\^)/,/^(?:\{\{\s*else\b)/,/^(?:\{\{\{)/,/^(?:\{\{&)/,/^(?:\{\{![\s\S]*?\}\})/,/^(?:\{\{)/,/^(?:=)/,/^(?:\.(?=[} ]))/,/^(?:\.\.)/,/^(?:[\/.])/,/^(?:\s+)/,/^(?:\}\}\})/,/^(?:\}\})/,/^(?:"(\\["]|[^"])*")/,/^(?:'(\\[']|[^'])*')/,/^(?:@[a-zA-Z]+)/,/^(?:true(?=[}\s]))/,/^(?:false(?=[}\s]))/,/^(?:[0-9]+(?=[}\s]))/,/^(?:[a-zA-Z0-9_$-]+(?=[=}\s\/.]))/,/^(?:\[[^\]]*\])/,/^(?:.)/,/^(?:$)/]; +lexer.conditions = {"mu":{"rules":[3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28],"inclusive":false},"emu":{"rules":[2],"inclusive":false},"INITIAL":{"rules":[0,1,28],"inclusive":true}}; +return lexer;})() +parser.lexer = lexer; +function Parser () { this.yy = {}; }Parser.prototype = parser;parser.Parser = Parser; +return new Parser; +})(); +if (typeof require !== 'undefined' && typeof exports !== 'undefined') { +exports.parser = handlebars; +exports.Parser = handlebars.Parser; +exports.parse = function () { return handlebars.parse.apply(handlebars, arguments); } +exports.main = function commonjsMain(args) { + if (!args[1]) + throw new Error('Usage: '+args[0]+' FILE'); + var source, cwd; + if (typeof process !== 'undefined') { + source = require('fs').readFileSync(require('path').resolve(args[1]), "utf8"); + } else { + source = require("file").path(require("file").cwd()).join(args[1]).read({charset: "utf-8"}); + } + return exports.parser.parse(source); +} +if (typeof module !== 'undefined' && require.main === module) { + exports.main(typeof process !== 'undefined' ? 
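// Sketch of the start-condition mechanism encoded in lexer.conditions above:
// the top of the condition stack names the active lexer state, and only that
// state's rule indices are tried. Case 0 calls begin("mu") on "{{" and cases
// 17/18 call popState() on "}}", so mustache tokens only match inside tags.
var conditionStack = ['INITIAL'];
function activeRules(conditions) {
  // mirrors _currentRules(): rule set of the state on top of the stack
  return conditions[conditionStack[conditionStack.length - 1]].rules;
}
activeRules(lexer.conditions);   // -> [0, 1, 28] while in INITIAL
conditionStack.push('mu');       // what begin("mu") does
activeRules(lexer.conditions);   // -> the mustache rules 3..28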
process.argv.slice(1) : require("system").args); +} +}; +; +// lib/handlebars/compiler/base.js +Handlebars.Parser = handlebars; + +Handlebars.parse = function(string) { + Handlebars.Parser.yy = Handlebars.AST; + return Handlebars.Parser.parse(string); +}; + +Handlebars.print = function(ast) { + return new Handlebars.PrintVisitor().accept(ast); +}; + +Handlebars.logger = { + DEBUG: 0, INFO: 1, WARN: 2, ERROR: 3, level: 3, + + // override in the host environment + log: function(level, str) {} +}; + +Handlebars.log = function(level, str) { Handlebars.logger.log(level, str); }; +; +// lib/handlebars/compiler/ast.js +(function() { + + Handlebars.AST = {}; + + Handlebars.AST.ProgramNode = function(statements, inverse) { + this.type = "program"; + this.statements = statements; + if(inverse) { this.inverse = new Handlebars.AST.ProgramNode(inverse); } + }; + + Handlebars.AST.MustacheNode = function(rawParams, hash, unescaped) { + this.type = "mustache"; + this.escaped = !unescaped; + this.hash = hash; + + var id = this.id = rawParams[0]; + var params = this.params = rawParams.slice(1); + + // a mustache is an eligible helper if: + // * its id is simple (a single part, not `this` or `..`) + var eligibleHelper = this.eligibleHelper = id.isSimple; + + // a mustache is definitely a helper if: + // * it is an eligible helper, and + // * it has at least one parameter or hash segment + this.isHelper = eligibleHelper && (params.length || hash); + + // if a mustache is an eligible helper but not a definite + // helper, it is ambiguous, and will be resolved in a later + // pass or at runtime. + }; + + Handlebars.AST.PartialNode = function(id, context) { + this.type = "partial"; + + // TODO: disallow complex IDs + + this.id = id; + this.context = context; + }; + + var verifyMatch = function(open, close) { + if(open.original !== close.original) { + throw new Handlebars.Exception(open.original + " doesn't match " + close.original); + } + }; + + Handlebars.AST.BlockNode = function(mustache, program, inverse, close) { + verifyMatch(mustache.id, close); + this.type = "block"; + this.mustache = mustache; + this.program = program; + this.inverse = inverse; + + if (this.inverse && !this.program) { + this.isInverse = true; + } + }; + + Handlebars.AST.ContentNode = function(string) { + this.type = "content"; + this.string = string; + }; + + Handlebars.AST.HashNode = function(pairs) { + this.type = "hash"; + this.pairs = pairs; + }; + + Handlebars.AST.IdNode = function(parts) { + this.type = "ID"; + this.original = parts.join("."); + + var dig = [], depth = 0; + + for(var i=0,l=parts.length; i": "&gt;", + '"': "&quot;", + "'": "&#x27;", + "`": "&#x60;" + }; + + var badChars = /[&<>"'`]/g; + var possible = /[&<>"'`]/; + + var escapeChar = function(chr) { + return escape[chr] || "&amp;"; + }; + + Handlebars.Utils = { + escapeExpression: function(string) { + // don't escape SafeStrings, since they're already safe + if (string instanceof Handlebars.SafeString) { + return string.toString(); + } else if (string == null || string === false) { + return ""; + } + + if(!possible.test(string)) { return string; } + return string.replace(badChars, escapeChar); + }, + + isEmpty: function(value) { + if (typeof value === "undefined") { + return true; + } else if (value === null) { + return true; + } else if (value === false) { + return true; + } else if(Object.prototype.toString.call(value) === "[object Array]" && value.length === 0) { + return true; + } else { + return false; + } + } + }; +})();; +// lib/handlebars/compiler/compiler.js + +/*jshint
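// Example use of the Utils defined above (illustrative): escapeExpression
// HTML-escapes everything matched by badChars, while a SafeString is
// returned untouched.
Handlebars.Utils.escapeExpression('<b>"hi"</b>');
// -> "&lt;b&gt;&quot;hi&quot;&lt;/b&gt;"
Handlebars.Utils.escapeExpression(new Handlebars.SafeString('<b>hi</b>'));
// -> "<b>hi</b>"
Handlebars.Utils.isEmpty([]);    // -> true, an empty array counts as empty
Handlebars.Utils.isEmpty(0);     // -> false, zero is not treated as empty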
eqnull:true*/ +Handlebars.Compiler = function() {}; +Handlebars.JavaScriptCompiler = function() {}; + +(function(Compiler, JavaScriptCompiler) { + // the foundHelper register will disambiguate helper lookup from finding a + // function in a context. This is necessary for mustache compatibility, which + // requires that context functions in blocks are evaluated by blockHelperMissing, + // and then proceed as if the resulting value was provided to blockHelperMissing. + + Compiler.prototype = { + compiler: Compiler, + + disassemble: function() { + var opcodes = this.opcodes, opcode, out = [], params, param; + + for (var i=0, l=opcodes.length; i 0) { + this.source[1] = this.source[1] + ", " + locals.join(", "); + } + + // Generate minimizer alias mappings + if (!this.isChild) { + var aliases = []; + for (var alias in this.context.aliases) { + this.source[1] = this.source[1] + ', ' + alias + '=' + this.context.aliases[alias]; + } + } + + if (this.source[1]) { + this.source[1] = "var " + this.source[1].substring(2) + ";"; + } + + // Merge children + if (!this.isChild) { + this.source[1] += '\n' + this.context.programs.join('\n') + '\n'; + } + + if (!this.environment.isSimple) { + this.source.push("return buffer;"); + } + + var params = this.isChild ? ["depth0", "data"] : ["Handlebars", "depth0", "helpers", "partials", "data"]; + + for(var i=0, l=this.environment.depths.list.length; i this.stackVars.length) { this.stackVars.push("stack" + this.stackSlot); } + return "stack" + this.stackSlot; + }, + + popStack: function() { + var item = this.compileStack.pop(); + + if (item instanceof Literal) { + return item.value; + } else { + this.stackSlot--; + return item; + } + }, + + topStack: function() { + var item = this.compileStack[this.compileStack.length - 1]; + + if (item instanceof Literal) { + return item.value; + } else { + return item; + } + }, + + quotedString: function(str) { + return '"' + str + .replace(/\\/g, '\\\\') + .replace(/"/g, '\\"') + .replace(/\n/g, '\\n') + .replace(/\r/g, '\\r') + '"'; + }, + + setupHelper: function(paramSize, name) { + var params = []; + this.setupParams(paramSize, params); + var foundHelper = this.nameLookup('helpers', name, 'helper'); + + return { + params: params, + name: foundHelper, + callParams: ["depth0"].concat(params).join(", "), + helperMissingParams: ["depth0", this.quotedString(name)].concat(params).join(", ") + }; + }, + + // the params and contexts arguments are passed in arrays + // to fill in + setupParams: function(paramSize, params) { + var options = [], contexts = [], param, inverse, program; + + options.push("hash:" + this.popStack()); + + inverse = this.popStack(); + program = this.popStack(); + + // Avoid setting fn and inverse if neither are set. This allows + // helpers to do a check for `if (options.fn)` + if (program || inverse) { + if (!program) { + this.context.aliases.self = "this"; + program = "self.noop"; + } + + if (!inverse) { + this.context.aliases.self = "this"; + inverse = "self.noop"; + } + + options.push("inverse:" + inverse); + options.push("fn:" + program); + } + + for(var i=0; i') + .appendTo(document.body) + + this.$backdrop.click( + this.options.backdrop == 'static' ? + $.proxy(this.$element[0].focus, this.$element[0]) + : $.proxy(this.hide, this) + ) + + if (doAnimate) this.$backdrop[0].offsetWidth // force reflow + + this.$backdrop.addClass('in') + + if (!callback) return + + doAnimate ? 
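// Sketch of the compile-time stack discipline behind popStack()/topStack()
// above (pushLiteral is assumed from an elided part of the compiler): a
// Literal wrapper flows through compileStack without consuming a runtime
// "stackN" register, so constants are inlined into the generated source.
function Literal(value) { this.value = value; }
var compileStack = [], stackSlot = 0;
function pushLiteral(src) { compileStack.push(new Literal(src)); }
function popStack() {
  var item = compileStack.pop();
  if (item instanceof Literal) return item.value;  // constant: inline directly
  stackSlot--;                                     // free the runtime register
  return item;                                     // e.g. "stack1"
}
pushLiteral('"foo"');
popStack();   // -> '"foo"', no stack register was ever allocated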
+ this.$backdrop.one($.support.transition.end, callback) : + callback() + + } else if (!this.isShown && this.$backdrop) { + this.$backdrop.removeClass('in') + + $.support.transition && this.$element.hasClass('fade')? + this.$backdrop.one($.support.transition.end, callback) : + callback() + + } else if (callback) { + callback() + } + } + } + + + /* MODAL PLUGIN DEFINITION + * ======================= */ + + var old = $.fn.modal + + $.fn.modal = function (option) { + return this.each(function () { + var $this = $(this) + , data = $this.data('modal') + , options = $.extend({}, $.fn.modal.defaults, $this.data(), typeof option == 'object' && option) + if (!data) $this.data('modal', (data = new Modal(this, options))) + if (typeof option == 'string') data[option]() + else if (options.show) data.show() + }) + } + + $.fn.modal.defaults = { + backdrop: true + , keyboard: true + , show: true + } + + $.fn.modal.Constructor = Modal + + + /* MODAL NO CONFLICT + * ================= */ + + $.fn.modal.noConflict = function () { + $.fn.modal = old + return this + } + + + /* MODAL DATA-API + * ============== */ + + $(document).on('click.modal.data-api', '[data-toggle="modal"]', function (e) { + var $this = $(this) + , href = $this.attr('href') + , $target = $($this.attr('data-target') || (href && href.replace(/.*(?=#[^\s]+$)/, ''))) //strip for ie7 + , option = $target.data('modal') ? 'toggle' : $.extend({ remote:!/#/.test(href) && href }, $target.data(), $this.data()) + + e.preventDefault() + + $target + .modal(option) + .one('hide', function () { + $this.focus() + }) + }) + +}(window.jQuery); + +/* ============================================================ + * bootstrap-dropdown.js v2.3.2 + * http://twitter.github.com/bootstrap/javascript.html#dropdowns + * ============================================================ + * Copyright 2012 Twitter, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================ */ + + +!function ($) { + + "use strict"; // jshint ;_; + + + /* DROPDOWN CLASS DEFINITION + * ========================= */ + + var toggle = '[data-toggle=dropdown]' + , Dropdown = function (element) { + var $el = $(element).on('click.dropdown.data-api', this.toggle) + $('html').on('click.dropdown.data-api', function () { + $el.parent().removeClass('open') + }) + } + + Dropdown.prototype = { + + constructor: Dropdown + + , toggle: function (e) { + var $this = $(this) + , $parent + , isActive + + if ($this.is('.disabled, :disabled')) return + + $parent = getParent($this) + + isActive = $parent.hasClass('open') + + clearMenus() + + if (!isActive) { + if ('ontouchstart' in document.documentElement) { + // if mobile we use a backdrop because click events don't delegate + $('
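// Example usage of the plugin definition above (illustrative; '#myModal' is a
// hypothetical selector): an object argument constructs the Modal with merged
// defaults, a string argument invokes that method on the stored instance, and
// backdrop: 'static' makes a backdrop click refocus the modal instead of
// hiding it, per the backdrop() branch above.
$('#myModal').modal({ backdrop: 'static', keyboard: false });  // create + show
$('#myModal').modal('hide');                                   // call a method
// Declarative form, handled by the data-api handler above:
// <a data-toggle="modal" href="#myModal">Launch modal</a>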