pax_global_header00006660000000000000000000000064132017764630014523gustar00rootroot0000000000000052 comment=e94a9051f54abb66fd3963933587b2d4057a1614 .mailmap000066400000000000000000000010741320177646300125120ustar00rootroot00000000000000Calvin Spencer Kwok capkokoon David Kurz GaspardT idlelop ljsdoug Oscar Padilla Valérian Rousset Vitaly Shukela 115.sh000066400000000000000000000055331320177646300117370ustar00rootroot00000000000000# Plowshare 115.com module # Copyright (c) 2010-2012 Plowshare team # # This file is part of Plowshare. # # Plowshare is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Plowshare is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Plowshare. If not, see . MODULE_115_REGEXP_URL='http://\([[:alnum:]]\+\.\)\?115\.com/file/' MODULE_115_DOWNLOAD_OPTIONS=" AUTH,a,auth,a=USER:PASSWORD,User account (mandatory)" MODULE_115_DOWNLOAD_RESUME=no MODULE_115_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=unused MODULE_115_DOWNLOAD_SUCCESSIVE_INTERVAL= # Output a 115.com file download URL # $1: cookie file # $2: 115.com url # stdout: real file download link 115_download() { local COOKIEFILE=$1 local URL=$2 local PAGE JSON LINKS HEADERS DIRECT FILENAME U1 U2 if [ -z "$AUTH" ]; then log_error 'Anonymous users cannot download links' return $ERR_LINK_NEED_PERMISSIONS fi LOGIN_DATA=$(echo \ 'login[account]=$USER&login[passwd]=$PASSWORD&back=http%3A%2F%2Fwww.115.com&goto=http%3A%2F%2F115.com' | uri_encode) post_login "$AUTH" "$COOKIEFILE" "$LOGIN_DATA" 'http://passport.115.com/?ac=login' '-L' >/dev/null || return PAGE=$(curl -L -b "$COOKIEFILE" "$URL" | break_html_lines) || return if matchi "file_size:[[:space:]]*'0B'," "$PAGE"; then return $ERR_LINK_DEAD fi U1=$(echo "$PAGE" | parse_all 'url:' "'\(/?ct=download[^']*\)" | last_line) || return U2=$(echo "$PAGE" | parse 'GetMyDownloadAddress(' "('\([^']*\)") || return # {"state":true,"urls":[{"client":1,"url":"http:\/\/119. ... JSON=$(curl -b "$COOKIEFILE" "http://115.com$U1$U2") || return if ! match_json_true state "$JSON"; then log_error 'Bad state. Site updated?' return $ERR_FATAL fi LINKS=$(echo "$JSON" | parse_json 'url' split) || return # There are usually mirrors (do a HTTP HEAD request to check dead mirror) while read URL; do HEADERS=$(curl -I "$URL") || return FILENAME=$(echo "$HEADERS" | grep_http_header_content_disposition) if [ -n "$FILENAME" ]; then echo "$URL" echo "$FILENAME" return 0 fi DIRECT=$(echo "$HEADERS" | grep_http_header_content_type) || return if [ "$DIRECT" = 'application/octet-stream' ]; then echo "$URL" return 0 fi done <<< "$LINKS" log_debug 'all mirrors are dead' return $ERR_FATAL } 180upload.sh000066400000000000000000000215721320177646300131470ustar00rootroot00000000000000# Plowshare 180upload.com module # Copyright (c) 2012-2013 Plowshare team # # This file is part of Plowshare. # # Plowshare is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. 
# # Plowshare is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Plowshare. If not, see . # # Note: This module is similar to filebox and zalaa (for upload) MODULE_180UPLOAD_REGEXP_URL='https\?://\(www\.\)\?180upload\.com/' MODULE_180UPLOAD_DOWNLOAD_OPTIONS=" AUTH_FREE,b,auth-free,a=USER:PASSWORD,Free account" MODULE_180UPLOAD_DOWNLOAD_RESUME=yes MODULE_180UPLOAD_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no MODULE_180UPLOAD_DOWNLOAD_SUCCESSIVE_INTERVAL= MODULE_180UPLOAD_UPLOAD_OPTIONS=" AUTH_FREE,b,auth-free,a=USER:PASSWORD,Free account DESCRIPTION,d,description,S=DESCRIPTION,Set file description TOEMAIL,,email-to,e=EMAIL, field for notification email" MODULE_180UPLOAD_UPLOAD_REMOTE_SUPPORT=no MODULE_180UPLOAD_PROBE_OPTIONS="" # Static function. Proceed with login # $1: authentication # $2: cookie file # $3: base URL # stdout: account type ("" or "free" or "premium") on success 180upload_login() { local -r AUTH_FREE=$1 local -r COOKIE_FILE=$2 local -r BASE_URL=$3 local LOGIN_DATA PAGE ERR MSG NAME # Try to revive old session... if COOKIES=$(storage_get 'cookies'); then echo "$COOKIES" > "$COOKIE_FILE" fi # ... and check login status PAGE=$(curl -b "$COOKIE_FILE" -b 'lang=english' "$BASE_URL") || return if match 'Login' "$PAGE"; then log_debug 'Cached cookies invalid, deleting storage entry' storage_set 'cookies' [ -n "$AUTH_FREE" ] || return 0 LOGIN_DATA='op=login&redirect=&login=$USER&password=$PASSWORD' PAGE=$(post_login "$AUTH_FREE" "$COOKIE_FILE" "$LOGIN_DATA" \ "${BASE_URL}" -b 'lang=english') || return # Check for errors # Note: Successful login redirects and sets cookies 'login' and 'xfss' # Get error message, if any ERR=$(parse_tag_quiet "class='err'" 'b' <<< "$PAGE") if [ -n "$ERR" ]; then log_debug "Remote error: $ERR" return $ERR_LOGIN_FAILED fi storage_set 'cookies' "$(cat "$COOKIE_FILE")" MSG='logged in as' else MSG='reused login for' fi # Get username NAME=$(parse_cookie 'login' < "$COOKIE_FILE") || return log_debug "Successfully $MSG member '$NAME'" echo 'free' } # Output a 180upload.com file download URL # $1: cookie file # $2: 180upload.com url # stdout: real file download link 180upload_download() { local -r COOKIE_FILE=$1 local -r URL=$2 local -r BASE_URL='http://180upload.com/' local PAGE ERR PUBKEY RESP CHALL ID CAPTCHA_DATA local FORM_HTML FORM_OP FORM_ID FORM_RAND FORM_DD FORM_METHOD_F FORM_METHOD_P 180upload_login "$AUTH_FREE" "$COOKIE_FILE" "$BASE_URL" > /dev/null || return PAGE=$(curl -c "$COOKIE_FILE" -b "$COOKIE_FILE" -b 'lang=english' "$URL") || return if match '
File Not Found
' "$PAGE"; then return $ERR_LINK_DEAD fi FORM_HTML=$(grep_form_by_name "$PAGE" 'F1') || return FORM_OP=$(parse_form_input_by_name 'op' <<< "$FORM_HTML") || return FORM_ID=$(parse_form_input_by_name 'id' <<< "$FORM_HTML") || return FORM_RAND=$(parse_form_input_by_name 'rand' <<< "$FORM_HTML") || return FORM_DD=$(parse_form_input_by_name 'down_direct' <<< "$FORM_HTML") || return FORM_METHOD_F=$(parse_form_input_by_name_quiet 'method_free' <<< "$FORM_HTML") FORM_METHOD_P=$(parse_form_input_by_name_quiet 'method_premium' <<< "$FORM_HTML") # Check for Captcha if match 'api\.solvemedia\.com' "$FORM_HTML"; then log_debug 'Solve Media CAPTCHA found' PUBKEY='MIqUIMADf7KbDRf0ANI-9wLP.8iJSG9N' RESP=$(solvemedia_captcha_process $PUBKEY) || return { read CHALL; read ID; } <<< "$RESP" CAPTCHA_DATA="-F adcopy_challenge=$CHALL -F adcopy_response=none" elif match 'RecaptchaOptions' "$FORM_HTML"; then log_debug 'reCaptcha found' local WORD PUBKEY='6LeEc8wSAAAAAJG8vzd61DufFYS_I6nXwMkl4dhI' RESP=$(recaptcha_process $PUBKEY) || return { read WORD; read CHALL; read ID; } <<< "$RESP" CAPTCHA_DATA="-F recaptcha_challenge_field=$CHALL -F recaptcha_response_field=$WORD" fi PAGE=$(curl -b "$COOKIE_FILE" -b 'lang=english' \ -F "op=$FORM_OP" -F "id=$FORM_ID" -F "rand=$FORM_RAND" \ -F 'referer=' -F "method_free=$FORM_METHOD_F" \ -F "method_premium=$FORM_METHOD_P" $CAPTCHA_DATA \ -F "down_direct=$FORM_DD" "$URL") || return # Get error message, if any ERR=$(parse_tag_quiet '
/dev/null || return PAGE=$(curl -b "$COOKIE_FILE" -b 'lang=english' "$BASE_URL") || return MAX_SIZE=$(parse 'Up to ' 'to \([[:digit:]]\+\) Mb' <<< "$PAGE") || return readonly MAX_SIZE=$(( MAX_SIZE * 1048576 )) # convert MiB to B SIZE=$(get_filesize "$FILE") || return if [ "$SIZE" -gt "$MAX_SIZE" ]; then log_debug "file is bigger than $MAX_SIZE" return $ERR_SIZE_LIMIT_EXCEEDED fi FORM_HTML=$(grep_form_by_name "$PAGE" 'file') || return FORM_ACTION=$(parse_form_action <<< "$FORM_HTML") || return FORM_UTYPE=$(parse_form_input_by_name 'upload_type' <<< "$FORM_HTML") || return FORM_SESS=$(parse_form_input_by_name_quiet 'sess_id' <<< "$FORM_HTML") FORM_TMP_SRV=$(parse_form_input_by_name 'srv_tmp_url' <<< "$FORM_HTML") || return log_debug "Server URL: '$FORM_TMP_SRV'" UPLOAD_ID=$(random dec 12) PAGE=$(curl "${FORM_TMP_SRV}/status.html?${UPLOAD_ID}=$DEST_FILE=180upload.com") || return # Sanity check. Avoid failure after effective upload if match '>404 Not Found<' "$PAGE"; then log_error 'upstream error (404)' return $ERR_FATAL fi PAGE=$(curl_with_log --include -b "$COOKIE_FILE" \ -F "upload_type=$FORM_UTYPE" -F "sess_id=$FORM_SESS" \ -F "srv_tmp_url=$FORM_TMP_SRV" -F "file_1=@$FILE;filename=$DEST_FILE" \ --form-string "file_1_descr=$DESCRIPTION" \ --form-string "link_rcpt=$TOEMAIL" \ -F 'tos=1' -F 'submit_btn= Upload! ' \ "${FORM_ACTION}${UPLOAD_ID}") || return STATUS_URL=$(grep_http_header_location <<< "$PAGE") || return PAGE=$(curl -b "$COOKIE_FILE" -b 'lang=english' $STATUS_URL) || return # Parse and output download and delete link parse 'Download Link' '>\(http[^<]\+\)<' 1 <<< "$PAGE" || return parse 'Delete Link' '>\(http[^<]\+\)<' 1 <<< "$PAGE" || return } # Probe a download URL # $1: cookie file (unused here) # $2: 180upload url # $3: requested capability list # stdout: 1 capability per line 180upload_probe() { local -r URL=$2 local -r REQ_IN=$3 local PAGE REQ_OUT FILE_SIZE PAGE=$(curl -L -b 'lang=english' "$URL") || return if match '
File Not Found
' "$PAGE"; then return $ERR_LINK_DEAD fi REQ_OUT=c # Note: all info parsed from HTML comments on the page if [[ $REQ_IN = *f* ]]; then parse_tag 'center nowrap' 'b' <<< "$PAGE" && REQ_OUT="${REQ_OUT}f" fi if [[ $REQ_IN = *s* ]]; then FILE_SIZE=$(parse_tag 'Size:' 'small' <<< "$PAGE") && \ FILE_SIZE=${FILE_SIZE#(} && FILE_SIZE=${FILE_SIZE% bytes)} && \ echo "$FILE_SIZE" && REQ_OUT="${REQ_OUT}s" fi echo $REQ_OUT } 1fichier.sh000066400000000000000000000363161320177646300131260ustar00rootroot00000000000000# Plowshare 1fichier.com module # Copyright (c) 2011 halfman # Copyright (c) 2012-2015 Plowshare team # # This file is part of Plowshare. # # Plowshare is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Plowshare is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Plowshare. If not, see . MODULE_1FICHIER_REGEXP_URL='https\?://\(.*\.\)\?\(1fichier\.\(com\|net\|org\|fr\)\|alterupload\.com\|cjoint\.\(net\|org\)\|desfichiers\.\(com\|net\|org\|fr\)\|dfichiers\.\(com\|net\|org\|fr\)\|megadl\.fr\|mesfichiers\.\(net\|org\)\|piecejointe\.\(net\|org\)\|pjointe\.\(com\|net\|org\|fr\)\|tenvoi\.\(com\|net\|org\)\|dl4free\.com\)' MODULE_1FICHIER_DOWNLOAD_OPTIONS=" AUTH,a,auth,a=USER:PASSWORD,Premium account LINK_PASSWORD,p,link-password,S=PASSWORD,Used in password-protected files RESTRICT,,restrictip,,Restrict login session to my IP address" MODULE_1FICHIER_DOWNLOAD_RESUME=yes MODULE_1FICHIER_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no MODULE_1FICHIER_DOWNLOAD_SUCCESSIVE_INTERVAL= MODULE_1FICHIER_UPLOAD_OPTIONS=" AUTH,a,auth,a=USER:PASSWORD,User account LINK_PASSWORD,p,link-password,S=PASSWORD,Protect a link with a password FOLDER,,folder,s=FOLDER,Folder to upload files into (support subfolders) MESSAGE,d,message,S=MESSAGE,Set file message (is send with notification email) DOMAIN,,domain,N=ID,You can set domain ID to upload (ID can be found at http://www.1fichier.com/en/api/web.html) TOEMAIL,,email-to,e=EMAIL, field for notification email RESTRICT,,restrictip,,Restrict login session to my IP address" MODULE_1FICHIER_UPLOAD_REMOTE_SUPPORT=no MODULE_1FICHIER_LIST_OPTIONS="" MODULE_1FICHIER_LIST_HAS_SUBFOLDERS=no MODULE_1FICHIER_DELETE_OPTIONS="" MODULE_1FICHIER_PROBE_OPTIONS="" # Static function. Proceed with login 1fichier_login() { local -r AUTH=$1 local -r COOKIE_FILE=$2 local -r BASE_URL=$3 local LOGIN_DATA LOGIN_RESULT SID # Long session lt=on # Restrict the session to my IP address purge=on # Purge old sessions restrict=on LOGIN_DATA='mail=$USER&pass=$PASSWORD<=on&purge=on&secure=on&Login=Login' [ -z "$RESTRICT" ] || LOGIN_DATA="$LOGIN_DATA&restrict=on" LOGIN_RESULT=$(post_login "$AUTH" "$COOKIE_FILE" "$LOGIN_DATA" \ "$BASE_URL/login.pl") || return # You are logged in. This page will redirect you. SID=$(parse_cookie_quiet 'SID' < "$COOKIE_FILE") || return [ -n "$SID" ] || return $ERR_LOGIN_FAILED #PAGE=$(curl -b "$COOKIE_FILE" -b 'LG=en' 'https://1fichier.com/console/index.pl') || return } # Static function. 
Proper way to get file information # $1: 1fichier url # stdout: string (with ; as separator) 1fichier_checklink() { local S FID S=$(curl --form-string "links[]=$1" 'https://1fichier.com/check_links.pl') || return # Note: Password protected links return # url;;;PRIVATE if [ "${S##*;}" = 'BAD LINK' ]; then log_debug 'obsolete link format?' return $ERR_LINK_DEAD elif [ "${S##*;}" = 'NOT FOUND' ]; then return $ERR_LINK_DEAD fi echo "$S" } # Output a 1fichier file download URL # $1: cookie file (account only) # $2: 1fichier url # stdout: real file download link # # Note: Consecutive HTTP requests must be delayed (>10s). # Otherwise you'll get the parallel download message. 1fichier_download() { local -r COOKIE_FILE=$1 local URL=$(replace 'http://' 'https://' <<< "$2") local FID PAGE FILE_URL FILE_NAME WAIT CV SESS FID=$(parse_quiet . '://\([[:alnum:]]*\)\.' <<< "$URL") if [ -n "$FID" ] && [ "$FID" != '1fichier' ]; then URL="https://1fichier.com/?$FID" fi if CV=$(storage_get 'cookie_file'); then echo "$CV" >"$COOKIE_FILE" # Check for expired session PAGE=$(curl -b "$COOKIE_FILE" -b LG=en "https://1fichier.com/console/index.pl") || return if ! match '>[[:space:]]*\(My files\|Logout\)<' "$PAGE"; then log_error 'Expired session, delete cache entry' storage_set 'cookie_file' echo 1 return $ERR_LINK_TEMP_UNAVAILABLE fi SESS=$(parse_cookie 'SID' < "$COOKIE_FILE") log_debug "session (cached): '$SESS'" elif [ -n "$AUTH" ]; then 1fichier_login "$AUTH" "$COOKIE_FILE" 'https://1fichier.com' || return storage_set 'cookie_file' "$(cat "$COOKIE_FILE")" SESS=$(parse_cookie 'SID' < "$COOKIE_FILE") log_debug "session (new): '$SESS'" fi FILE_URL=$(curl --head -b "$COOKIE_FILE" "$URL" | \ grep_http_header_location_quiet) PAGE=$(1fichier_checklink "$URL") || return IFS=';' read -r _ FILE_NAME _ <<< "$PAGE" if [ -z "$FILE_NAME" ]; then log_error 'This must be a direct download link with password, filename will be wrong!' fi if [ -n "$FILE_URL" ]; then echo "$FILE_URL" echo "$FILE_NAME" return 0 fi PAGE=$(curl -b 'LG=en' "$URL") || return # Location: http://www.1fichier.com/?c=SCAN if match 'MOVED - TEMPORARY_REDIRECT' "$PAGE"; then return $ERR_LINK_TEMP_UNAVAILABLE fi # The requested file could not be found # The file may have been deleted by its owner. # The requested file has been deleted following an abuse request. if match 'The \(requested \)\?file \(could not be found\|.*been deleted\)' "$PAGE"; then return $ERR_LINK_DEAD fi # Warning ! Without premium status, you can download only one file at a time if match 'Warning ! Without premium status,' "$PAGE"; then log_error 'No parallel download allowed.' echo 300 return $ERR_LINK_TEMP_UNAVAILABLE # Warning !
    # Without subscription, you must wait between downloads.
    # You must wait 9 minutes
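    # Plowshare convention (as used elsewhere in this module): echoing a
    # number of seconds before returning $ERR_LINK_TEMP_UNAVAILABLE tells the
    # calling engine how long to wait before retrying, e.g.:
    #   echo $((WAIT * 60)); return $ERR_LINK_TEMP_UNAVAILABLE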
elif match 'you must wait between downloads.' "$PAGE"; then WAIT=$(parse 'Without subscription,' 'You must wait \([[:digit:]]\+\) minute' 1 <<< "$PAGE") || WAIT=1 echo $((WAIT * 60)) return $ERR_LINK_TEMP_UNAVAILABLE # Please wait until the file has been scanned by our anti-virus elif match 'Please wait until the file has been scanned' "$PAGE"; then log_error 'File is scanned for viruses.' return $ERR_LINK_TEMP_UNAVAILABLE fi # Accessing this file is protected by password.
Please put it on the box bellow : if match 'name="pass"' "$PAGE"; then if [ -z "$LINK_PASSWORD" ]; then LINK_PASSWORD=$(prompt_for_password) || return fi FILE_URL=$(curl -i -F "pass=$LINK_PASSWORD" "$URL" | \ grep_http_header_location_quiet) || return test "$FILE_URL" || return $ERR_LINK_PASSWORD_REQUIRED echo "$FILE_URL" echo "$FILE_NAME" return 0 fi # Access to download PAGE=$(curl --include -b "$COOKIE_FILE" -b 'LG=en' -d '' \ --referer "$URL" "$URL") || return # Authenticated download with forced menu FILE_URL=$(grep_http_header_location_quiet <<< "$PAGE") # Click here to download the file if [ -z "$FILE_URL" ] ; then FILE_URL=$(parse 'class="ok btn-general btn-orange"' '\$]" "$NAME"; then log_error "Folder may not contain the next characters: \"'\`\\<>\$" return $ERR_FATAL fi # We begin in the root directory (DIR_ID=0) DIR_ID=0 # Convert subdirectory names into an array. IFS='/' read -a DIR_NAMES <<< "$NAME" for BASE in "${DIR_NAMES[@]}"; do # Skip empty names. [ -z "$BASE" ] && continue log_debug 'Getting folder data' PAGE=$(curl -b "$COOKIE_FILE" -b 'LG=en' "$BASE_URL/console/files.pl?dir_id=$DIR_ID&oby=0&search=") || return # Replace [ and ] with \[ and \] for match and parse function BASE_REPLACE="$BASE" BASE_REPLACE=$(replace_all '[' '\[' <<< "$BASE_REPLACE") BASE_REPLACE=$(replace_all ']' '\]' <<< "$BASE_REPLACE") log_debug "Replace '$BASE' with '$BASE_REPLACE' for match and parse function" # Create folder if not exist # class="dF">$BASE< if ! match "class=\"dF\">$BASE_REPLACE<" "$PAGE"; then log_debug "Creating folder: '$BASE'" RESPONSE=$(curl -b "$COOKIE_FILE" -b 'LG=en' -L \ -d "mkdir=$(uri_encode_strict <<< "$BASE")" \ -d "dir_id=$DIR_ID" \ "$BASE_URL/console/mkdir.pl") || return if [ "$RESPONSE" != 'Folder created successfully' ]; then if [ -z "$RESPONSE" ]; then log_error 'Could not create folder.' else log_error "Create folder error: $RESPONSE" fi return $ERR_FATAL fi # Grab the page again to have the DIR_ID of the new directory PAGE=$(curl -b "$COOKIE_FILE" -b 'LG=en' "$BASE_URL/console/files.pl?dir_id=$DIR_ID&oby=0&search=") || return fi # class=" directory" rel="$DIR_ID">
$BASE< DIR_ID=$(parse . "rel=\"\([[:digit:]]\+\)\">
$BASE_REPLACE<" <<< "$PAGE") || return done log_debug "DIR ID: '$DIR_ID'" echo $DIR_ID } # Upload a file to 1fichier # $1: cookie file # $2: input file (with full path) # $3: remote filename # stdout: download + del link 1fichier_upload() { local -r COOKIE_FILE=$1 local -r FILE=$2 local -r DESTFILE=$3 local -r UPLOADURL='https://upload.1fichier.com' local LOGIN_DATA S_ID RESPONSE DOWNLOAD_ID REMOVE_ID DOMAIN_ID DIR_ID if CV=$(storage_get 'cookie_file'); then echo "$CV" >"$COOKIE_FILE" # Check for expired session PAGE=$(curl -b "$COOKIE_FILE" -b LG=en "https://1fichier.com/console/index.pl") || return if ! match '>[[:space:]]*\(My files\|Logout\)<' "$PAGE"; then log_error 'Expired session, delete cache entry' storage_set 'cookie_file' echo 1 return $ERR_LINK_TEMP_UNAVAILABLE fi SESS=$(parse_cookie 'SID' < "$COOKIE_FILE") log_debug "session (cached): '$SESS'" elif [ -n "$AUTH" ]; then 1fichier_login "$AUTH" "$COOKIE_FILE" 'https://1fichier.com' || return storage_set 'cookie_file' "$(cat "$COOKIE_FILE")" SESS=$(parse_cookie 'SID' < "$COOKIE_FILE") log_debug "session (new): '$SESS'" fi if [ -n "$FOLDER" ]; then if [ -n "$SESS" ]; then DIR_ID=$(1fichier_check_folder "$FOLDER" "$COOKIE_FILE" 'https://1fichier.com') || return else log_error "Folder option cannot be used without an account." return $ERR_FATAL fi fi S_ID=$(random ll 10) RESPONSE=$(curl_with_log -b "$COOKIE_FILE" \ --form-string "message=$MESSAGE" \ --form-string "mail=$TOEMAIL" \ -F "dpass=$LINK_PASSWORD" \ -F "domain=${DOMAIN:-0}" \ -F "file[]=@$FILE;filename=$DESTFILE" \ -F "did=$DIR_ID" \ "$UPLOADURL/upload.cgi?id=$S_ID") || return RESPONSE=$(curl --header 'EXPORT:1' -b "$COOKIE_FILE" \ "$UPLOADURL/end.pl?xid=$S_ID") || return # filename;filesize;dlid;rmid,domain;?? IFS=";" read -r _ _ DOWNLOAD_ID REMOVE_ID DOMAIN_ID _ <<< "$RESPONSE" local -a DOMAIN_STR=('1fichier.com' 'alterupload.com' 'cjoint.net' 'desfichiers.com' \ 'dfichiers.com' 'megadl.fr' 'mesfichiers.net' 'piecejointe.net' 'pjointe.com' \ 'tenvoi.com' 'dl4free.com' ) if [[ $DOMAIN_ID -gt 10 || $DOMAIN_ID -lt 0 ]]; then log_error 'Bad domain ID response, maybe API updated?' return $ERR_FATAL fi echo "https://${DOMAIN_STR[$DOMAIN_ID]}/?${DOWNLOAD_ID}" echo "https://${DOMAIN_STR[$DOMAIN_ID]}/remove/$DOWNLOAD_ID/$REMOVE_ID" } # Delete a file uploaded to 1fichier # $1: cookie file (unused here) # $2: delete url 1fichier_delete() { local URL=$2 local PAGE if match '/bg/remove/' "$URL"; then URL=$(echo "$URL" | replace '/bg/' '/en/') elif ! match '/en/remove/' "$URL"; then URL=$(echo "$URL" | replace '/remove/' '/en/remove/') fi PAGE=$(curl "$URL") || return # Invalid link - File not found if match 'File not found' "$PAGE"; then return $ERR_LINK_DEAD fi PAGE=$(curl "$URL" -F 'force=1') || return #
    # The file has been destroyed
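    # The second POST with 'force=1' (above) confirms the deletion; on
    # success the returned page should contain the notice checked below.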
if ! match 'file has been' "$PAGE"; then log_debug 'unexpected result, site updated?' return $ERR_FATAL fi } # List a 1fichier folder # $1: 1fichier folder link # $2: recurse subfolders (null string means not selected) # stdout: list of links 1fichier_list() { local URL=$1 local PAGE LINKS NAMES if ! match '/dir/' "$URL"; then log_error 'This is not a directory list' return $ERR_FATAL fi if match '/../dir/' "$URL"; then local BASE_URL DIR_ID BASE_URL=$(basename_url "$URL") DIR_ID=${URL##*/} URL="$BASE_URL/dir/$DIR_ID" fi PAGE=$(curl -L "$URL") || return LINKS=$(echo "$PAGE" | parse_all_attr_quiet 'T.l.chargement de' href) NAMES=$(echo "$PAGE" | parse_all_tag_quiet 'T.l.chargement de' a) test "$LINKS" || return $ERR_LINK_DEAD list_submit "$LINKS" "$NAMES" || return } # Probe a download URL # $1: cookie file (unused here) # $2: 1fichier url # $3: requested capability list 1fichier_probe() { local URL=${2%/} local -r REQ_IN=$3 local FID RESPONSE FILE_NAME FILE_SIZE FID=$(parse_quiet . '://\([[:alnum:]]*\)\.' <<< "$URL") if [ -n "$FID" ] && [ "$FID" != '1fichier' ]; then URL="https://1fichier.com/?$FID" fi RESPONSE=$(1fichier_checklink "$URL") || return # url;filename;filesize IFS=';' read -r URL FILE_NAME FILE_SIZE <<< "$RESPONSE" REQ_OUT=c if [[ $REQ_IN = *f* ]]; then if [[ $FILE_NAME ]]; then echo "$FILE_NAME" REQ_OUT="${REQ_OUT}f" else log_debug 'empty filename: file must be private or password protected' fi fi if [[ $REQ_IN = *i* ]]; then echo "$FID" REQ_OUT="${REQ_OUT}i" fi if [[ $REQ_IN = *s* ]]; then echo "$FILE_SIZE" REQ_OUT="${REQ_OUT}s" fi if [[ $REQ_IN = *v* ]]; then echo "$URL" REQ_OUT="${REQ_OUT}v" fi echo $REQ_OUT } 2shared.sh000066400000000000000000000154571320177646300127670ustar00rootroot00000000000000# Plowshare 2share.com module # Copyright (c) 2010-2013 Plowshare team # # This file is part of Plowshare. # # Plowshare is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Plowshare is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Plowshare. If not, see . MODULE_2SHARED_REGEXP_URL='http://\(www\.\)\?2shared\.com/\(file\|document\|fadmin\|photo\|audio\|video\)/' MODULE_2SHARED_DOWNLOAD_OPTIONS=" AUTH_FREE,b,auth-free,a=EMAIL:PASSWORD,Free account" MODULE_2SHARED_DOWNLOAD_RESUME=yes MODULE_2SHARED_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=unused MODULE_2SHARED_DOWNLOAD_SUCCESSIVE_INTERVAL= MODULE_2SHARED_UPLOAD_OPTIONS=" AUTH_FREE,b,auth-free,a=EMAIL:PASSWORD,Free account (mandatory)" MODULE_2SHARED_UPLOAD_REMOTE_SUPPORT=no MODULE_2SHARED_DELETE_OPTIONS=" AUTH_FREE,b,auth-free,a=EMAIL:PASSWORD,Free account" MODULE_2SHARED_PROBE_OPTIONS="" # Static function. Proceed with login # $1: authentication # $2: cookie file # $3: base URL 2shared_login() { local AUTH=$1 local COOKIE_FILE=$2 local BASE_URL=$3 local LOGIN_DATA JSON_RESULT ERR LOGIN_DATA='login=$USER&password=$PASSWORD&callback=jsonp' JSON_RESULT=$(post_login "$AUTH_FREE" "$COOKIE_FILE" "$LOGIN_DATA" \ "$BASE_URL/login") || return # {"ok":true,"rejectReason":"","loginRedirect":"http://... 
# Set-Cookie: Login Password if match_json_true 'ok' "$JSON_RESULT"; then return 0 fi ERR=$(echo "$JSON_RESULT" | parse_json 'rejectReason') log_debug "Remote error: $ERR" return $ERR_LOGIN_FAILED } # Output a 2shared file download URL # $1: cookie file (unused here) # $2: 2shared url # stdout: real file download link 2shared_download() { local -r COOKIE_FILE=$1 local -r URL=$2 local -r BASE_URL='http://www.2shared.com' local PAGE FILE_URL FILE_NAME WAIT_LINE WAIT_TIME # .htm are redirected to .html if [ -n "$AUTH_FREE" ]; then 2shared_login "$AUTH_FREE" "$COOKIE_FILE" "$BASE_URL" || return PAGE=$(curl -L -b "$COOKIE_FILE" "$URL") || return else PAGE=$(curl -L "$URL") || return fi if match 'file link that you requested is not valid' "$PAGE"; then return $ERR_LINK_DEAD fi # We are sorry, but your download request can not be processed right now. if match 'id="timeToWait"' "$PAGE"; then WAIT_LINE=$(echo "$PAGE" | parse_tag 'timeToWait' span) WAIT_TIME=${WAIT_LINE%% *} if match 'minute' "$WAIT_LINE"; then echo $(( WAIT_TIME * 60 )) else echo $((WAIT_TIME)) fi return $ERR_LINK_TEMP_UNAVAILABLE elif match '/photo/' "$URL"; then FILE_URL=$(echo "$PAGE" | parse 'retrieveLink\.jsp' "get('\([^']*\)") FILE_URL=$(curl "$BASE_URL$FILE_URL") || return else FILE_URL=$(parse_form_input_by_name 'd3link' <<< "$PAGE") || return fi FILE_NAME=$(echo "$PAGE" | parse_tag title | parse . '^\(.*\) download - 2shared$') echo "$FILE_URL" echo "$FILE_NAME" } # Upload a file to 2shared.com # $1: cookie file # $2: input file (with full path) # $3: remote filename # stdout: 2shared.com download + admin link 2shared_upload() { local -r COOKIE_FILE=$1 local -r FILE=$2 local -r DESTFILE=$3 local -r BASE_URL='http://www.2shared.com' local PAGE FORM_HTML FORM_ACTION FORM_DC COMPLETE DL_URL AD_URL test "$AUTH_FREE" || return $ERR_LINK_NEED_PERMISSIONS 2shared_login "$AUTH_FREE" "$COOKIE_FILE" "$BASE_URL" || return PAGE=$(curl -b "$COOKIE_FILE" "$BASE_URL") || return COMPLETE=$(echo "$PAGE" | parse 'uploadComplete' 'location="\([^"]*\)"') FORM_HTML=$(grep_form_by_name "$PAGE" 'uploadForm') || return FORM_ACTION=$(echo "$FORM_HTML" | parse_form_action) || return FORM_DC=$(echo "$FORM_HTML" | parse_form_input_by_name 'mainDC') || return PAGE=$(curl_with_log -b "$COOKIE_FILE" \ -F "mainDC=$FORM_DC" -F 'x=0' -F 'y=0' \ -F "fff=@$FILE;filename=$DESTFILE" \ "$FORM_ACTION") || return # Your upload has successfully completed! if ! match 'upload has successfully completed' "$PAGE"; then log_error 'upload failure' return $ERR_FATAL fi PAGE=$(curl -b "$COOKIE_FILE" "$BASE_URL$COMPLETE") || return DL_URL=$(echo "$PAGE" | parse_attr '/\(file\|document\|photo\|audio\|video\)/' action) || return AD_URL=$(echo "$PAGE" | parse_attr '/fadmin/' action) echo "$DL_URL" echo echo "$AD_URL" } # Delete a file uploaded on 2shared # $1: cookie file # $2: admin url 2shared_delete() { local COOKIE_FILE=$1 local URL=$2 local BASE_URL='http://www.2shared.com' local ADMIN_PAGE FORM DL_LINK AD_LINK if [ -n "$AUTH_FREE" ]; then 2shared_login "$AUTH_FREE" "$COOKIE_FILE" "$BASE_URL" || return fi # 2Shared bug (2012-06): deleted files stays in the list of "My files" ADMIN_PAGE=$(curl -c "$COOKIE_FILE" -b "$COOKIE_FILE" "$URL") || return if ! 
match 'Delete File' "$ADMIN_PAGE"; then return $ERR_LINK_DEAD fi FORM=$(grep_form_by_name "$ADMIN_PAGE" 'theForm') || return DL_LINK=$(echo "$FORM" | parse_form_input_by_name 'downloadLink' | uri_encode_strict) AD_LINK=$(echo "$FORM" | parse_form_input_by_name 'adminLink' | uri_encode_strict) curl -b "$COOKIE_FILE" --referer "$URL" -o /dev/null \ -d "adminLink=$AD_LINK" \ -d "downloadLink=$DL_LINK" \ -d 'resultMode=2&password=&description=&publisher=' \ "$URL" || return # Can't parse for success, we get redirected to main page } # Probe a download URL # $1: cookie file (unused here) # $2: 2shared.com url # $3: requested capability list 2shared_probe() { local -r REQ_IN=$3 local PAGE REQ_OUT FILE_SIZE PAGE=$(curl --location "$URL") || return # The file link that you requested is not valid. if match 'file link that you requested is not valid' "$PAGE"; then return $ERR_LINK_DEAD fi REQ_OUT=c if [[ $REQ_IN = *f* ]]; then echo "$PAGE" | parse_tag h1 | html_to_utf8 && REQ_OUT="${REQ_OUT}f" fi if [[ $REQ_IN = *s* ]]; then FILE_SIZE=$(echo "$PAGE" | parse '>File size' \ '^[[:blank:]]*\([[:digit:]]\+\(.[[:digit:]]\+\)\?[[:space:]][KMG]\?B\)' 1) && translate_size "${FILE_SIZE/,/}" && REQ_OUT="${REQ_OUT}s" fi if [[ $REQ_IN = *i* ]]; then parse 'action=' '"/complete/\([^/]\+\)' <<< "$PAGE" && REQ_OUT="${REQ_OUT}i" fi echo $REQ_OUT } 4share_vn.sh000066400000000000000000000131001320177646300133070ustar00rootroot00000000000000# Plowshare 4share.vn module # Copyright (c) 2013 Plowshare team # # This file is part of Plowshare. # # Plowshare is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Plowshare is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Plowshare. If not, see . MODULE_4SHARE_VN_REGEXP_URL='http://up\.4share\.vn/\(d\|f\)/[[:alnum:]]\+' MODULE_4SHARE_VN_DOWNLOAD_OPTIONS="" MODULE_4SHARE_VN_DOWNLOAD_RESUME=no MODULE_4SHARE_VN_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no MODULE_4SHARE_VN_DOWNLOAD_SUCCESSIVE_INTERVAL= MODULE_4SHARE_VN_UPLOAD_OPTIONS="" MODULE_4SHARE_VN_UPLOAD_REMOTE_SUPPORT=no MODULE_4SHARE_VN_LIST_OPTIONS="" MODULE_4SHARE_VN_LIST_HAS_SUBFOLDERS=no MODULE_4SHARE_VN_PROBE_OPTIONS="" # Output a 4share.vn file download URL # $1: cookie file # $2: 4share.vn url # stdout: real file download link 4share_vn_download() { local -r COOKIE_FILE=$1 local -r URL=$2 local -r BASE_URL='http://up.4share.vn' local PAGE WAIT_TIME TIME CAPTCHA_IMG FILE_URL FILENAME PAGE=$(curl -c "$COOKIE_FILE" -b "$COOKIE_FILE" "$URL") || return if match 'FID Không hợp lệ!' "$PAGE" || \ match 'Xin lỗi bạn, File đã bị xóa' "$PAGE"; then return $ERR_LINK_DEAD fi if match 'Đợi .* nữa để Download!' "$PAGE"; then WAIT_TIME=$(parse 'Đợi ' 'Đợi \([^<]\+\)' <<< "$PAGE") || return log_error 'Forced delay between downloads.' 
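        # Rough English renderings of the Vietnamese messages matched above:
        # "FID Không hợp lệ!" ~ "Invalid FID!"; "Xin lỗi bạn, File đã bị xóa"
        # ~ "Sorry, the file has been deleted"; "Đợi ... nữa để Download!"
        # ~ "Wait ... longer to download!"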
echo "$WAIT_TIME" return $ERR_LINK_TEMP_UNAVAILABLE fi WAIT_TIME=$(parse 'var counter=' 'var counter=\([0-9]\+\)' <<< "$PAGE") || return # If captcha solve will take too long TIME=$(date +%s) CAPTCHA_IMG=$(create_tempfile '.jpg') || return curl -b "$COOKIE_FILE" -o "$CAPTCHA_IMG" \ "$BASE_URL/library/captcha1.html" || return local WI WORD ID WI=$(captcha_process "$CAPTCHA_IMG") || return { read WORD; read ID; } <<<"$WI" rm -f "$CAPTCHA_IMG" TIME=$(($(date +%s) - $TIME)) if [ $TIME -lt $WAIT_TIME ]; then WAIT_TIME=$((WAIT_TIME - $TIME)) wait $WAIT_TIME || return fi PAGE=$(curl -i -b "$COOKIE_FILE" \ -d "security_code=$WORD" \ -d 'submit=DOWNLOAD FREE' \ -d 's=' \ "$URL") || return if match 'Bạn đã nhập sai Mã bảo vệ download' "$PAGE"; then log_error 'Wrong captcha.' captcha_nack $ID return $ERR_CAPTCHA fi FILE_URL=$(grep_http_header_location <<< "$PAGE") || return FILENAME=$(parse . '&f=\([^&]\+\)' <<< "$FILE_URL") || return captcha_ack $ID echo "$FILE_URL" echo "$FILENAME" } # Upload a file to 4share.vn # $1: cookie file # $2: input file (with full path) # $3: remote filename # stdout: download link 4share_vn_upload() { local -r COOKIE_FILE=$1 local -r FILE=$2 local -r DEST_FILE=$3 local -r BASE_URL='http://4share.vn' local PAGE MAX_SIZE LINK_DL ERROR MAX_SIZE=209715200 # 200 MiB FILE_SIZE=$(get_filesize "$FILE") if [ "$FILE_SIZE" -gt "$MAX_SIZE" ]; then log_debug "File is bigger than $MAX_SIZE" return $ERR_SIZE_LIMIT_EXCEEDED fi # Does not count files uploaded to free account for some reason, so login dropped for a while PAGE=$(curl_with_log \ -F "Filename=$DEST_FILE" \ -F 'name=public_upload' \ -F 'folder=/files' \ -F "Filedata=@$FILE;filename=$DEST_FILE" \ -F 'Upload=Submit Query' \ "$BASE_URL/upload_script/uploadify1.lib") || return if match 'ERROR' "$PAGE"; then ERROR=$(parse 'ERROR:\([^<]\+\)' <<< "$PAGE") || return log_error "Remote error: $ERROR" return $ERR_FATAL fi LINK_DL=$(parse_attr 'href' <<< "$PAGE") || return echo "$LINK_DL" } # Probe a download URL # $1: cookie file (unused here) # $2: 4share.vn url # $3: requested capability list # stdout: 1 capability per line 4share_vn_probe() { local -r URL=$2 local -r REQ_IN=$3 local PAGE FILE_SIZE REQ_OUT PAGE=$(curl "$URL") || return if match 'FID Không hợp lệ!' "$PAGE" || \ match 'Xin lỗi bạn, File đã bị xóa' "$PAGE"; then return $ERR_LINK_DEAD fi REQ_OUT=c if [[ $REQ_IN = *f* ]]; then parse 'Downloading: ' \ 'Downloading: \([^<]\+\)' <<< "$PAGE" && REQ_OUT="${REQ_OUT}f" fi if [[ $REQ_IN = *s* ]]; then FILE_SIZE=$(parse 'c: ' \ 'Kích thước: \([^<]\+\)' <<< "$PAGE") && \ translate_size "$FILE_SIZE" && REQ_OUT="${REQ_OUT}s" fi echo $REQ_OUT } # List a 4share.vn web folder URL # $1: folder URL # $2: recurse subfolders (null string means not selected) # stdout: list of links and file names (alternating) 4share_vn_list() { local -r URL=$1 local -r REC=$2 local PAGE URL_LIST LINKS NAMES URL_LIST=$(replace '/d/' '/dlist/' <<< "$URL") PAGE=$(curl "$URL_LIST") || return PAGE=$(break_html_lines_alt <<< "$PAGE") LINKS=$(parse_all_quiet '^http://up.4share.vn/f/' '^\([^<]\+\)' <<< "$PAGE") NAMES=$(parse_all_quiet '^http://up.4share.vn/f/' '^http://up.4share.vn/f/[[:alnum:]]\+/\([^<]\+\)' <<< "$PAGE") list_submit "$LINKS" "$NAMES" } 4shared.sh000066400000000000000000000324611320177646300127630ustar00rootroot00000000000000# Plowshare 4shared.com module # Copyright (c) 2010-2014 Plowshare team # # This file is part of Plowshare. 
# # Plowshare is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Plowshare is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Plowshare. If not, see . MODULE_4SHARED_REGEXP_URL='https\?://\(www\.\)\?4shared\.com/' MODULE_4SHARED_DOWNLOAD_OPTIONS=" AUTH_FREE,b,auth-free,a=USER:PASSWORD,Free account LINK_PASSWORD,p,link-password,S=PASSWORD,Used in password-protected files TORRENT,,torrent,,Get torrent link (instead of direct download link)" MODULE_4SHARED_DOWNLOAD_RESUME=yes MODULE_4SHARED_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=yes MODULE_4SHARED_DOWNLOAD_SUCCESSIVE_INTERVAL=5 MODULE_4SHARED_UPLOAD_OPTIONS=" AUTH_FREE,b,auth-free,a=USER:PASSWORD,Free account (mandatory)" MODULE_4SHARED_UPLOAD_REMOTE_SUPPORT=no MODULE_4SHARED_LIST_OPTIONS=" AUTH_FREE,b,auth-free,a=USER:PASSWORD,Free account DIRECT_LINKS,,direct,,Show direct links (if available) instead of regular ones LINK_PASSWORD,p,link-password,S=PASSWORD,Used in password-protected folder" MODULE_4SHARED_LIST_HAS_SUBFOLDERS=yes MODULE_4SHARED_PROBE_OPTIONS="" # Static function. Proceed with login (tested on free-membership) 4shared_login() { local -r AUTH=$1 local -r COOKIE_FILE=$2 local -r BASE_URL=$3 local LOGIN_DATA NAME LOGIN_DATA='login=$USER&password=$PASSWORD&doNotRedirect=true' post_login "$AUTH" "$COOKIE_FILE" "$LOGIN_DATA" \ "$BASE_URL/web/login" -o /dev/null || return # On success, add cookie entries: Login & Password NAME=$(parse_cookie_quiet 'Login' < "$COOKIE_FILE") if [ -n "$NAME" ]; then return 0 fi return $ERR_LOGIN_FAILED } # Output a 4shared file download URL # $1: cookie file # $2: 4shared url # stdout: real file download link 4shared_download() { local -r COOKIEFILE=$1 local URL=$2 local -r BASE_URL='https://www.4shared.com' local REAL_URL URL PAGE WAIT_URL FILE_URL FILE_NAME if [ -z "$AUTH_FREE" ]; then log_error '4shared does not allow anonymous file download. Add --auth-free option.' return $ERR_LINK_NEED_PERMISSIONS fi REAL_URL=$(curl -I "$URL" | grep_http_header_location_quiet) || return if test "$REAL_URL"; then URL=$REAL_URL fi if [ -n "$AUTH_FREE" ]; then 4shared_login "$AUTH_FREE" "$COOKIEFILE" "$BASE_URL" || return # add new entries in $COOKIEFILE PAGE=$(curl -b "$COOKIEFILE" -c "$COOKIEFILE" -b '4langcookie=en' "$URL") || return else PAGE=$(curl -c "$COOKIEFILE" -b '4langcookie=en' "$URL") || return fi if match '4shared\.com/dir/' "$URL"; then log_error 'This is a directory list, use plowlist!' return $ERR_FATAL elif match 'The file link that you requested is not valid.' "$PAGE"; then return $ERR_LINK_DEAD fi # You must enter a password to access this file. 
if match 'enter a password to access' "$PAGE"; then log_debug 'File is password protected' if [ -z "$LINK_PASSWORD" ]; then LINK_PASSWORD=$(prompt_for_password) || return fi local FORM_HTML FORM_ACTION FORM_DSID FORM_HTML=$(grep_form_by_name "$PAGE" 'theForm') || return FORM_ACTION=$(echo "$FORM_HTML" | parse_form_action) || return FORM_DSID=$(echo "$FORM_HTML" | parse_form_input_by_name 'dsid') PAGE=$(curl -b "$COOKIEFILE" -c "$COOKIEFILE" -b '4langcookie=en' \ -d "userPass2=$LINK_PASSWORD" \ -d "dsid=$FORM_DSID" \ "$FORM_ACTION") || return # The password you have entered is not valid if match 'enter a password to access' "$PAGE"; then return $ERR_LINK_PASSWORD_REQUIRED fi fi # Try to figure real filename from HTML # - trinityConfig.globalFileName = '...' FILE_NAME=$(parse_all_quiet 'trinityConfig.globalFileName' '=[[:space:]]*["'\'']\([^/"'\'']*\)' <<< "$PAGE") # -
    # ...
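    # Filename fallback chain: trinityConfig.globalFileName (above), then the
    # page's fileName heading tag, then the og:title meta attribute (which
    # may lack the file extension).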
if [ -z "$FILE_NAME" ]; then FILE_NAME=$(echo "$PAGE" | parse_tag_quiet '=.fileName' 'h1') fi # - # Warning: filename without extension if [ -z "$FILE_NAME" ]; then FILE_NAME=$(echo "$PAGE" | parse_attr 'og:title' 'content') fi # Special case for /photo/ URLs FILE_URL=$(echo "$PAGE" | parse_attr_quiet '?forceAttachmentDownload=' href) if [ -n "$FILE_URL" ]; then echo "$FILE_URL" return 0 fi WAIT_URL=$(parse_attr '4shared\.com/get/' href <<< "$PAGE") || return # Note: There is a strange entry required in cookie file: efdcyqLAT_3Q=1 WAIT_HTML=$(curl -L -b "$COOKIEFILE" -b '4langcookie=en' --referer "$URL" "$WAIT_URL") || return # Redirected in case of error if [ -z "$WAIT_HTML" ]; then URL=$(curl -I -b "$COOKIEFILE" "$WAIT_URL" | grep_http_header_location) if match 'err=not-logged$' "$URL"; then return $ERR_LINK_NEED_PERMISSIONS else log_error "Unexpected redirection: $URL" return $ERR_FATAL fi fi if match 'Login
to download this file' "$WAIT_HTML"; then return $ERR_LINK_NEED_PERMISSIONS fi #
    # 20
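    # The wait value (seconds) is parsed from the downloadDelayTimeSec
    # element quoted above; the code below falls back to 20 when absent.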
WAIT_TIME=$(echo "$WAIT_HTML" | parse_tag_quiet 'downloadDelayTimeSec' 'div') test -z "$WAIT_TIME" && WAIT_TIME=20 # Sanity check if match 'The file link that you requested is not valid\.' "$WAIT_HTML"; then return $ERR_LINK_DEAD fi if [ -z "$TORRENT" ]; then FILE_URL=$(echo "$WAIT_HTML" | parse_attr_quiet 'baseDownloadLink' value) if [ -z "$FILE_URL" ]; then FILE_URL=$(echo "$WAIT_HTML" | parse 'window\.location' '= "\([^"]*\)') || return fi else MODULE_4SHARED_DOWNLOAD_RESUME=no FILE_URL=$(echo "$WAIT_HTML" | parse_attr 'download-torrent' href) || return FILE_NAME="${FILE_NAME}.torrent" fi wait $((WAIT_TIME)) seconds || return echo "$FILE_URL" echo "$FILE_NAME" } # Upload a file to 4shared # $1: cookie file # $2: input file (with full path) # $3: remote filename # stdout: download + del link 4shared_upload() { local -r COOKIE_FILE=$1 local -r FILE=$2 local -r DESTFILE=$3 local BASE_URL='http://www.4shared.com' local PAGE JSON DESTFILE_ENC UP_URL DL_URL FILE_ID DIR_ID LOGIN_ID PASS_HASH local SZ SIZE_LIMIT test "$AUTH_FREE" || return $ERR_LINK_NEED_PERMISSIONS 4shared_login "$AUTH_FREE" "$COOKIE_FILE" "$BASE_URL" || return PAGE=$(curl -b "$COOKIE_FILE" "$BASE_URL/account/home.jsp") || return DIR_ID=$(parse 'AjaxFacade\.rootDirId' \ "=[[:space:]]*'\([^']\+\)" <<< "$PAGE") || return # Not required. Example: {"freeSpace":16102203291} JSON=$(curl -b "$COOKIE_FILE" "$BASE_URL/rest/account/freeSpace?dirId=$DIR_ID") || return SZ=$(get_filesize "$FILE") SIZE_LIMIT=$(echo "$JSON" | parse_json freeSpace) || return if [ "$SZ" -gt "$SIZE_LIMIT" ]; then log_debug "file is bigger than $SIZE_LIMIT" return $ERR_SIZE_LIMIT_EXCEEDED fi DESTFILE_ENC=$(echo "$DESTFILE" | uri_encode_strict) LOGIN_ID=$(parse_cookie 'Login' < "$COOKIE_FILE") || return PASS_HASH=$(parse_cookie 'Password' < "$COOKIE_FILE") || return JSON=$(curl -b "$COOKIE_FILE" -X POST \ -H "x-security: $LOGIN_ID" \ "$BASE_URL/rest/sharedFileUpload/create?dirId=$DIR_ID&name=$DESTFILE_ENC&size=$SZ") || return # {"status":true,"url":"","http://... if ! match_json_true 'status' "$JSON"; then return $ERR_FATAL fi UP_URL=$(echo "$JSON" | parse_json url) || return DL_URL=$(echo "$JSON" | parse_json d1link) || return FILE_ID=$(echo "$JSON" | parse_json fileId) || return DIR_ID=$(echo "$JSON" | parse_json uploadDir) JSON=$(curl_with_log -X POST --data-binary "@$FILE" \ -H "x-root-dir: $DIR_ID" \ -H "x-upload-dir: $DIR_ID" \ -H "x-file-name: $DESTFILE_ENC" \ -H "Content-Type: application/octet-stream" \ "$UP_URL&resumableFileId=$FILE_ID&resumableFirstByte=0§ionSize=$SZ&cuid=$LOGIN_ID&cupa=$PASS_HASH") || return # I should get { "status": "OK", "uploadedFileId": -1, "fileUploadUrl": "" } local STATUS ERR STATUS=$(echo "$JSON" | parse_json_quiet status) if [ "$STATUS" != 'OK' ]; then ERR=$(echo "$JSON" | parse_json Message) log_debug "Bad status: $STATUS" test "$ERR" && log_error "Remote error: $ERR" return $ERR_FATAL fi BASE_URL=$(basename_url "$UP_URL") JSON=$(curl -X POST -H 'Content-Type: ' \ -H "x-root-dir: $DIR_ID" \ -H "x-cookie: Login=$LOGIN_ID; Password=$PASS_HASH;" \ "$BASE_URL/rest/sharedFileUpload/finish?fileId=$FILE_ID") || return log_debug "JSON: '$JSON'" # {"status":true} #if ! 
match_json_true 'status' "$JSON"; then # log_error 'bad answer, file moved to Incompleted folder' # return $ERR_FATAL #fi echo "$DL_URL" } # List a 4shared folder URL # $1: 4shared.com link # $2: recurse subfolders (null string means not selected) # stdout: list of links 4shared_list() { local URL=$(echo "$1" | replace '/folder/' '/dir/') local BASE_URL='https://www.4shared.com' local COOKIE_FILE RET=0 # There are two views: # - Simple view link (URL with /folder/) # - Advanced view link (URL with /dir/) if ! match '4shared\.com/dir/' "$URL"; then log_error 'This is not a directory list' return $ERR_FATAL fi COOKIE_FILE=$(create_tempfile) || return 4shared_login "$AUTH_FREE" "$COOKIE_FILE" "$BASE_URL" || RET=$? if [ $RET -eq 0 ]; then 4shared_list_rec "$2" "$URL" "$COOKIE_FILE" || RET=$? fi rm -f "$COOKIE_FILE" return $RET } # static recursive function # $1: recursive flag # $2: web folder URL # $3: cookie file 4shared_list_rec() { local REC=$1 local URL=$2 local -r COOKIE_FILE=$3 local -r BASE_URL='http://www.4shared.com' local PAGE LINKS NAMES RET LINE SID DIR_ID JSON RET=$ERR_LINK_DEAD PAGE=$(curl -c "$COOKIE_FILE" -b "$COOKIE_FILE" -b '4langcookie=en' \ "$URL") || return # Please enter a password to access this folder if match 'enter a password to access' "$PAGE"; then log_debug 'Folder is password protected' if [ -z "$LINK_PASSWORD" ]; then LINK_PASSWORD=$(prompt_for_password) || return fi local FORM_HTML FORM_ACTION FORM_DSID FORM_HTML=$(grep_form_by_name "$PAGE" 'theForm') || return FORM_ACTION=$(echo "$FORM_HTML" | parse_form_action) || return FORM_DSID=$(echo "$FORM_HTML" | parse_form_input_by_name 'dsid') PAGE=$(curl -c "$COOKIE_FILE" -b "$COOKIE_FILE" -b '4langcookie=en' \ -d "userPass2=$LINK_PASSWORD" \ -d "dsid=$FORM_DSID" \ "$FORM_ACTION") || return # The password you have entered is not valid if match 'enter a password to access' "$PAGE"; then return $ERR_LINK_PASSWORD_REQUIRED fi fi # Sanity chech if match 'src="/images/spacer.gif" class="warn"' "$PAGE"; then log_error 'Site updated ?' return $ERR_FATAL fi if test "$DIRECT_LINKS"; then log_debug 'Note: provided links are temporary! Use "curl -J -O" on it.' LINKS=$(echo "$PAGE" | \ parse_all_attr_quiet 'class="icon16 download"' href) list_submit "$LINKS" && RET=0 else DIR_ID=$(echo "$PAGE" | parse 'AjaxFacade\.rootDirId' \ "=[[:space:]]*'\([^']\+\)") || return JSON=$(curl -b "$COOKIE_FILE" -b '4langcookie=en' -d "dirId=$DIR_ID" \ "$BASE_URL/web/accountActions/changeDir") || return LINKS=$(parse_json 'id' split <<<"$JSON") NAMES=$(parse_json 'name' split <<<"$JSON") list_submit "$LINKS" "$NAMES" "$BASE_URL/file/" '/' && RET=0 fi # Are there any subfolders? if test "$REC"; then LINKS=$(parse_all_quiet ':changeDir(' '(\([[:digit:]]\+\)' <<< "$PAGE") SID=$(parse_form_input_by_name 'sId' <<< "$PAGE") || return while read LINE; do test "$LINE" || continue URL="http://www.4shared.com/account/changedir.jsp?sId=$SID&ajax=false&changedir=$LINE&random=0" log_debug "entering sub folder: $URL" 4shared_list_rec "$REC" "$URL" "$COOKIE_FILE" && RET=0 done <<< "$LINKS" fi return $RET } # Probe a download URL # $1: cookie file (unused here) # $2: 4shared.com url # $3: requested capability list # stdout: 1 capability per line 4shared_probe() { local -r URL=$2 local -r REQ_IN=$3 local PAGE REQ_OUT PAGE=$(curl --location -b '4langcookie=en' "$URL") || return if ! 
match ' &>log.txt ``` ``` plowup -v4 -r0 --no-plowsharerc --no-color &>log.txt ``` *Attention*: Generated logs can contain your credentials (account login data specified with `-a` or `-b` command line switches). Be sure to remove them before posting. #### **How to submit a patch ?** Before submitting your patch, check that your work complies with [code policy](https://github.com/mcrapet/plowshare/wiki/Modules) (refer to last chapters). If this is okay, you can create a [new pull request](https://github.com/mcrapet/plowshare-modules-legacy/pulls/). Thanks! CONTRIBUTORS000066400000000000000000000033451320177646300127540ustar00rootroot00000000000000Alex Rea Andres Vargas Antoine Girard Arnau Sanchez Bach Le Baptiste Bastien Cecchinato Ben Zho <0xbzho@gmail.com> Calvin Spencer Kwok (rEtSaMfF) capkokoon Chu Chong Meng Steven David Kurz (MDXDave) ericb48 Fabiano Francesconi GaspardT (Fullmono) Golam Sarwar Hervé idleloop Jakub Wilk Jan Jason Julien Rolland kidburglar ljsdoug Matthieu Crapet Maurus Cuelenaere Nicolas Michaux Oscar Padilla (dataoscar,padillao) Pavel Alexeev Petr Pulpán pink ? Raziel-23 roadman17 RunningDroid Ryan Simon Lipp Soonbesleeping StalkR Stefan Meier Tapiwa Kelvin Thomas Jensen Tony Lainson Valérian Rousset (tharvik) Vitaly Shukela Walid Iguer Wesley Barroso zodman And also testers, bug reporters, premium account providers. THANK YOU! # vim: set fenc=utf-8: LICENSE000066400000000000000000001045131320177646300121000ustar00rootroot00000000000000 GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. 
For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. 
A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 
No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. 
You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. 
"Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. 
Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. 
If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. 
The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. 
It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.

    <one line to give the program's name and a brief idea of what it does.>
    Copyright (C) <year> <name of author>

    This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.

    This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.

    You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>.

Also add information on how to contact you by electronic and paper mail.

If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode:

    <program> Copyright (C) <year> <name of author>
    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
    This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details.

The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box".

You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see <http://www.gnu.org/licenses/>.

The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read <http://www.gnu.org/licenses/why-not-lgpl.html>.
README.md000066400000000000000000000210501320177646300123450ustar00rootroot00000000000000## Plowshare legacy modules matrix

Here is the current hosters support list:
 |plowdown|plowup|plowdel|plowlist|plowprobe
---|---|---|:---:|:---:|---
115|account :lock:<br/>[`--auth`]||||
180upload|free<br/>[`--auth-free`]|free<br/>[`--auth-free, --description, --email-to`]|||c f s
1fichier|premium :recycle:<br/>[`--restrictip, --link-password, --auth`]|account :recycle:<br/>[`--domain, --link-password, --email-to, --restrictip, --auth, --message, --folder`]|x|x|c f i s v
2shared|free<br/>[`--auth-free`]|free :lock:<br/>[`--auth-free`]|x||c f i s
4share_vn|anonymous|anonymous||x|c f s
4shared|free :cookie:<br/>[`--auth-free, --link-password, --torrent`]|free :lock:<br/>[`--auth-free`]||x|c f
anonfiles|anonymous|anonymous|||c f s
bayfiles|account<br/>[`--auth`]|account<br/>[`--auth`]|x||c f s
bayimg|anonymous|anonymous<br/>[`--admin-code, --tags`]|x||c f
bigfile|account<br/>[`--auth`]|account :rocket:<br/>[`--folder, --auth`]|x|x|c f s
billionuploads|anonymous|anonymous<br/>[`--link-password, --email-to, --description`]|||c f s
bitshare|account<br/>[`--auth`]|account :rocket:<br/>[`--hashkey, --method, --auth`]|x||c f s
catshare|account<br/>[`--auth`]||||c f i s
chomikuj|account<br/>[`--auth`]|account<br/>[`--folder, --description, --auth`]||x|c f s
crocko|account<br/>[`--api-key, --api, --auth`]|account<br/>[`--folder, --premium, --auth`]||x|c f s
data_hu|anonymous|free :lock:<br/>[`--auth-free`]|x||
dataport_cz|anonymous|account<br/>[`--auth`]|x||
depositfiles|account<br/>[`--auth`]|account<br/>[`--api, --auth`]|x|x|c f s
directmirror||account<br/>[`--count, --include, --auth`]||x|
divshare|anonymous :cookie:|free :lock:<br/>[`--folder, --auth-free, --description, --email-to`]|x|x|c f
dl_free_fr|anonymous :cookie:<br/>[`--link-password`]|anonymous<br/>[`--email-cc, --message, --link-password, --email-to`]|x||c f f s s
embedupload||||x|
espafiles|anonymous||||c f i s
euroshare_eu|free :cookie:<br/>[`--auth-free`]|free<br/>[`--auth-free, --description`]|x||c f h s
exoshare||account :rocket:<br/>[`--count, --short-link, --api-key, --auth, --api, --include`]||x|
faststore|account<br/>[`--link-password, --auth`]||||c f i s
fboom_me|anonymous||||c f i s
filebin_ca|anonymous|anonymous|||c f s
filecloud|account<br/>[`--apikey, --auth`]|account<br/>[`--apikey, --private, --auth, --tags`]||x|c f s
filecore|free<br/>[`--auth-free`]||||c s
filedais|anonymous||||c
filefactory|account<br/>[`--link-password, --auth`]|account :rocket:<br/>[`--async, --folder, --email-to, --link-password, --auth`]||x|c f s
filejoker|account<br/>[`--auth`]||||c f i s
filemonkey||free<br/>[`--folder, --auth-free, --create`]|||
fileover|anonymous||||c f
fileparadox||free<br/>[`--auth-free, --link-password, --email-to`]|||
filepost|account<br/>[`--auth`]|account :lock:<br/>[`--auth`]|x|x|c f i s
filepup_net|account :cookie:<br/>[`--auth`]||||c f i s
filer_net|anonymous|||x|c f i s
fileshark|account<br/>[`--auth`]||||c f i s
firedrive|anonymous||||c f s
flashx|anonymous||||c f
freakshare|free :cookie:<br/>[`--auth-free`]|account<br/>[`--auth`]|||c f s
fshare_vn|account<br/>[`--link-password, --auth`]|account<br/>[`--folder, --email-to, --link-password, --auth, --description`]||x|c f s
gamefront|anonymous|account<br/>[`--auth`]|||c f s
ge_tt|anonymous|account<br/>[`--folder, --auth`]||x|c f s
gfile_ru|anonymous|anonymous|||c f s
go4up||free :rocket:<br/>[`--count, --auth-free, --include, --api`]|x|x|
hdstream_to|account :cookie:<br/>[`--stream, --auth`]|account<br/>[`--category, --title, --auth, --level, --nodownload, --full-link, --quality`]|||c f s
hexupload|anonymous||||c f i s
hipfile|anonymous<br/>[`--link-password`]|account<br/>[`--premium, --description, --link-password, --email-to, --private, --auth, --folder`]|x|x|c f s
hotlink_cc|anonymous||||c f i
jheberg||account<br/>[`--auth`]||x|c f s
keep2share|account<br/>[`--auth`]|account :lock:<br/>[`--folder, --full-link, --create, --auth`]|||c f i s
letitbit|account<br/>[`--auth`]|account :lock:<br/>[`--folder, --auth`]|x|x|c f h s
lunaticfiles|account<br/>[`--auth`]||||c f i s
mediafire|anonymous<br/>[`--link-password`]|free :lock: :rocket:<br/>[`--description, --link-password, --unique, --private, --async, --folder, --auth-free`]||x|c f s
megashares|anonymous|free<br/>[`--category, --description, --link-password, --email-to, --private, --auth-free`]|x||c f s
mirrorcreator||free<br/>[`--count, --include, --secure, --link-password, --auth-free, --full-link`]||x|
mirrorupload||account<br/>[`--api, --include, --auth`]||x|
multiup_org||free :lock:<br/>[`--auth-free, --favorites`]||x|c f h
multiupload|anonymous|anonymous<br/>[`--count, --email-from, --description, --email-to`]||x|
myvdrive|account<br/>[`--link-password, --auth`]|account :rocket:<br/>[`--link-password, --email-to, --private, --auth, --async, --folder`]||x|c f s
nakido|anonymous :cookie:||||c f
netkups|anonymous|account<br/>[`--auth`]|||c f s
netload_in|premium<br/>[`--auth`]|premium<br/>[`--auth`]||x|c f h s
nitroflare|anonymous||||c f i s
nowdownload_co|anonymous|account :rocket:<br/>[`--auth`]|||c f
oboom|account<br/>[`--auth`]|account :rocket:<br/>[`--async, --folder, --auth`]||x|c f s
openload|anonymous|account :rocket:<br/>[`--async, --folder, --auth, --header`]|||c f s v
pastebin||||x|
prefiles|free<br/>[`--auth-free`]||||c f i s
promptfile|anonymous||||c f s
rapidgator|account<br/>[`--auth`]|account :rocket:<br/>[`--async, --folder, --clear, --auth`]|x|x|c f s
rapidu|account<br/>[`--auth`]||||c f i s
rghost|anonymous<br/>[`--link-password`]|anonymous<br/>[`--private, --description, --link-password`]|||c f h s t
rockfile_eu||account :lock: :recycle:<br/>[`--email-to, --description, --auth, --link-password`]|||
ryushare|account<br/>[`--link-password, --auth`]|account<br/>[`--email-to, --link-password, --auth`]|||c f s
salefiles|anonymous||||c f i s
sendspace|account<br/>[`--auth`]|account<br/>[`--description, --auth`]|x|x|c f s
sharebeast|anonymous<br/>[`--link-password`]|account :rocket:<br/>[`--email-to, --link-password, --auth, --description`]|x||c f s
sharehost|account<br/>[`--auth`]||||c f i s
shareonline_biz|account<br/>[`--auth`]|account :lock:<br/>[`--auth`]|||c f h s
sockshare|anonymous<br/>[`--link-password`]|account :rocket:<br/>[`--async, --folder, --link-password, --auth, --method`]|x|x|c f s
solidfiles|account<br/>[`--auth`]|account<br/>[`--folder, --private, --auth`]||x|c f s
tempsend|anonymous|anonymous<br/>[`--nossl, --ttl`]|||c f i s
tempshare|anonymous||||c f i s
tezfiles|anonymous||||c f s
thefilebay|free<br/>[`--auth-free`]|free<br/>[`--auth-free`]|||c f s
turbobit|account<br/>[`--auth`]|account<br/>[`--folder, --auth`]|x|x|c f s
uloz_to|anonymous||||c f i s
ultramegabit|account<br/>[`--auth`]|account :rocket:<br/>[`--folder, --auth`]|||c f s
uplea|anonymous||||c f i s
upload_cd|anonymous||||c f i s
uploadboy|premium :recycle:<br/>[`--auth`]||||c
uploading|anonymous :cookie:|free :lock:<br/>[`--auth-free`]|||c f s
uploaded_net|account :cookie: :recycle:<br/>[`--link-password, --auth`]|account :lock: :recycle:<br/>[`--private, --folder, --admin-code, --auth, --link-password`]|x|x|c f s v
uploadrocket|account<br/>[`--auth`]|account :rocket:<br/>[`--premium, --description, --link-password, --email-to, --publish, --auth, --folder, --proxy`]|||c f i s
upstore|account<br/>[`--auth`]|account<br/>[`--short-link, --auth`]|||c f s
uptobox|account<br/>[`--auth`]|account<br/>[`--auth`]|||c f i s
videowood_tv||account<br/>[`--auth`]|||
vidzi_tv||account :lock: :recycle:<br/>[`--private, --title, --description, --auth, --tags`]|||
vid_ag||account :lock: :recycle:<br/>[`--private, --title, --description, --auth, --tags`]|||
yourvideohost|premium<br/>[`--auth`]||||c
zalaa|anonymous|anonymous<br/>[`--description, --email-to, --link-password`]||x|c f s
ziddu|anonymous :cookie:|account<br/>[`--auth`]|||c f s
zippyshare|anonymous :cookie:|account<br/>[`--auth`]||x|c f s

(last update of this table: 2016-06-11; number of modules/supported hosters: 107)

Few notes:

* "account" means that both "free" and "premium" accounts are supported.
* Emoticons meaning:
  - :cookie: (**plowdown**) final url download requires cookies. Mostly valid for anonymous downloads.
  - :lock: mandatory login (it's common to have anonymous upload disabled by hosters).
  - :recycle: session management is handled by module (avoids multiple logins).
  - :rocket: (**plowup**) supports remote upload (usually requires premium account).
* Square brackets display dedicated command line options for a module and a function (download, upload, etc.).
* Characters in **plowprobe** column refer to the sequences interpreted by plowprobe's `--printf` command line option. For example, "c" means the module can report a file's health (dead link or not). Please see `man plowprobe` for more information.
anonfiles.sh000066400000000000000000000071041320177646300134030ustar00rootroot00000000000000# Plowshare anonfiles.com module
# Copyright (c) 2012-2013 Plowshare team
#
# This file is part of Plowshare.
#
# Plowshare is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Plowshare is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Plowshare. If not, see <http://www.gnu.org/licenses/>.

MODULE_ANONFILES_REGEXP_URL='https\?://\([[:alnum:]]\+\.\)\?anonfiles\.com/'

MODULE_ANONFILES_DOWNLOAD_OPTIONS=""
MODULE_ANONFILES_DOWNLOAD_RESUME=yes
MODULE_ANONFILES_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no
MODULE_ANONFILES_DOWNLOAD_FINAL_LINK_NEEDS_EXTRA=()
MODULE_ANONFILES_DOWNLOAD_SUCCESSIVE_INTERVAL=

MODULE_ANONFILES_UPLOAD_OPTIONS=""
MODULE_ANONFILES_UPLOAD_REMOTE_SUPPORT=no

MODULE_ANONFILES_PROBE_OPTIONS=""

# Output an AnonFiles.com file download URL
# $1: cookie file (unused here)
# $2: anonfiles url
# stdout: real file download link
anonfiles_download() {
    local -r URL=$2
    local PAGE FILE_URL FILENAME

    PAGE=$(curl -L "$URL") || return

    if match '404 - File Not Found<\|>File does not exist\.<' "$PAGE"; then
        return $ERR_LINK_DEAD
    fi

    FILE_URL=$(echo "$PAGE" | parse_attr_quiet 'download_button' href)
    if [ -z "$FILE_URL" ]; then
        FILE_URL=$(echo "$PAGE" | \
            parse_attr_quiet 'image_preview' src) || return
    fi

    FILENAME=$(echo "$PAGE" | parse_tag '.

MODULE_BAYFILES_REGEXP_URL='https\?://\(www\.\)\?bayfiles\.net/'

MODULE_BAYFILES_DOWNLOAD_OPTIONS="
AUTH,a,auth,a=USER:PASSWORD,User account"
MODULE_BAYFILES_DOWNLOAD_RESUME=yes
MODULE_BAYFILES_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no
MODULE_BAYFILES_DOWNLOAD_SUCCESSIVE_INTERVAL=300

MODULE_BAYFILES_UPLOAD_OPTIONS="
AUTH,a,auth,a=USER:PASSWORD,User account"
MODULE_BAYFILES_UPLOAD_REMOTE_SUPPORT=no

MODULE_BAYFILES_DELETE_OPTIONS=""
MODULE_BAYFILES_PROBE_OPTIONS=""

# Static function.
# Proceed with login (free or premium)
# Uses official API: http://bayfiles.net/api
bayfiles_login() {
    local AUTH=$1
    local API_URL=$2
    local USER PASSWORD LOGIN_JSON_DATA SESSID ERR

    split_auth "$AUTH" USER PASSWORD || return

    LOGIN_JSON_DATA=$(curl "${API_URL}/account/login/${USER}/${PASSWORD}") || return

    # {"error":"","session":"947qfkvd0eqvohb1sif3hcl0d2"}
    SESSID=$(echo "$LOGIN_JSON_DATA" | parse_json_quiet 'session')
    if [ -z "$SESSID" ]; then
        ERR=$(echo "$LOGIN_JSON_DATA" | parse_json 'error')
        log_debug "Remote error: $ERR"
        return $ERR_LOGIN_FAILED
    fi

    log_debug "sessid: $SESSID"
    echo "$SESSID"
    return 0
}

# Output a bayfiles.net file download URL
# $1: cookie file (for account only)
# $2: bayfiles url
# stdout: real file download link
bayfiles_download() {
    local COOKIE_FILE=$1
    local URL=$2
    local API_URL='http://api.bayfiles.net/v1'
    local AJAX_URL='http://bayfiles.net/ajax_download'
    local PAGE FILE_URL FILENAME SESSION OPT_SESSION

    if [ -n "$AUTH" ]; then
        SESSION=$(bayfiles_login "$AUTH" "$API_URL") || return
        OPT_SESSION="-b SESSID=$SESSION"
        PAGE=$(curl -c "$COOKIE_FILE" $OPT_SESSION "$URL") || return
    else
        PAGE=$(curl -c "$COOKIE_FILE" -b "$COOKIE_FILE" "$URL") || return
    fi

    if match 'The link is incorrect\|404 - Not Found' "$PAGE"; then
        return $ERR_LINK_DEAD
    fi

    # Free downloads show a premium comparison box:
    # "What are the benefits for premium members?"
    if match 'comparison\|benefits' "$PAGE"; then
        # Big files case
        local VFID DELAY TOKEN JSON_COUNT DATA_DL

        # Upgrade to premium or wait 5 minutes.
        if match 'Upgrade to premium or wait' "$PAGE"; then
            DELAY=$(echo "$PAGE" | parse 'premium or wait' \
                'wait[[:space:]]\([[:digit:]]\+\)[[:space:]]*minute')
            echo $((DELAY * 60))
            return $ERR_LINK_TEMP_UNAVAILABLE
        fi

        VFID=$(echo "$PAGE" | parse 'var vfid = ' '= \([[:digit:]]\+\);') || return
        # If no delay was found, we try without one
        DELAY=$(echo "$PAGE" | parse_quiet 'var delay = ' '= \([[:digit:]]\+\);')

        JSON_COUNT=$(curl --get -b "$COOKIE_FILE" \
            --data "action=startTimer&vfid=$VFID" \
            "$AJAX_URL") || return
        TOKEN=$(echo "$JSON_COUNT" | parse_json token) || return

        wait $((DELAY)) || return

        DATA_DL=$(curl -b "$COOKIE_FILE" \
            $OPT_SESSION \
            --data "action=getLink&vfid=$VFID&token=$TOKEN" \
            "$AJAX_URL") || return

        FILE_URL=$(echo "$DATA_DL" | \
            parse 'onclick' "\(http[^']*\)") || return

    # Premium account
    else
        FILE_URL=$(echo "$PAGE" | parse_attr 'class="highlighted-btn' 'href') || return
        MODULE_BAYFILES_DOWNLOAD_SUCCESSIVE_INTERVAL=0
    fi

    # Extract filename from $PAGE, works for both cases
    FILENAME=$(parse_attr 'title="' 'title' <<< "$PAGE" | html_to_utf8)

    echo "$FILE_URL"
    echo "$FILENAME"
}

# Upload a file to bayfiles.net
# $1: cookie file (unused here)
# $2: input file (with full path)
# $3: remote filename
# stdout: download link + delete link + admin link
bayfiles_upload() {
    local FILE=$2
    local DESTFILE=$3
    local API_URL='http://api.bayfiles.net/v1'
    local SESSION_GET JSON UPLOAD_URL FILE_URL DELETE_URL ADMIN_URL

    # Account users (free or premium) have a session id
    if [ -n "$AUTH" ]; then
        SESSION_GET='?session='$(bayfiles_login "$AUTH" "$API_URL") || return
    else
        SESSION_GET=''
    fi

    JSON=$(curl "${API_URL}/file/uploadUrl${SESSION_GET}") || return

    # {"error":"","uploadUrl":"http ..","progressUrl":"http .."}
    UPLOAD_URL=$(echo "$JSON" | parse_json 'uploadUrl') || return

    # Sanity check (wrong upload url: site is broken)
    if [[ $UPLOAD_URL = http:///* ]]; then
        return $ERR_LINK_TEMP_UNAVAILABLE
    fi

    JSON=$(curl_with_log -F "file=@$FILE;filename=$DESTFILE" \
        "$UPLOAD_URL") || return

    # {"error":"","fileId":"abK1","size":"123456","sha1":"6f ..", ..}
    FILE_URL=$(echo "$JSON" | parse_json 'downloadUrl') || return
    DELETE_URL=$(echo "$JSON" | parse_json 'deleteUrl') || return
    ADMIN_URL=$(echo "$JSON" | parse_json 'linksUrl') || return

    echo "$FILE_URL"
    echo "$DELETE_URL"
    echo "$ADMIN_URL"
}

# Delete a file on bayfiles
# $1: cookie file (unused here)
# $2: delete link
bayfiles_delete() {
    local URL=$2
    local PAGE CONFIRM

    PAGE=$(curl "$URL") || return

    # Are you sure you want to delete this file?
    if match 'Confirm Deletion' "$PAGE"; then
        CONFIRM=$(echo "$PAGE" | parse_attr 'Confirm' href) || return
        PAGE=$(curl "$URL$CONFIRM") || return

        # File successfully deleted.
        match 'successfully deleted' "$PAGE" && return 0

    # The requested file could not be found.
    elif match 'file could not be found' "$PAGE"; then
        return $ERR_LINK_DEAD
    fi

    # Invalid security token. Please check your link.
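    # Fall-through: neither a deletion confirmation nor a "file not found"
    # page matched above, so any remaining answer (e.g. a rejected or
    # expired token) is treated as fatal.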
    return $ERR_FATAL
}

# Probe a download URL
# $1: cookie file (unused here)
# $2: bayfiles url
# $3: requested capability list
# stdout: 1 capability per line
bayfiles_probe() {
    local -r URL=$2
    local -r REQ_IN=$3
    local PAGE REQ_OUT FILE_SIZE

    PAGE=$(curl -L "$URL") || return

    if match 'The link is incorrect\|404 - Not Found' "$PAGE"; then
        return $ERR_LINK_DEAD
    fi

    REQ_OUT=c

    if [[ $REQ_IN = *f* ]]; then
        parse_attr 'title=' 'title' <<< "$PAGE" | html_to_utf8 && \
            REQ_OUT="${REQ_OUT}f"
    fi

    if [[ $REQ_IN = *s* ]]; then
        FILE_SIZE=$(echo "$PAGE" | parse '>File:<' '\([^<]*\)' 1) && \
            translate_size "$FILE_SIZE" && REQ_OUT="${REQ_OUT}s"
    fi

    echo $REQ_OUT
}
bayimg.sh000066400000000000000000000072341320177646300127010ustar00rootroot00000000000000# Plowshare bayimg.com module
# Copyright (c) 2013 Plowshare team
#
# This file is part of Plowshare.
#
# Plowshare is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Plowshare is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Plowshare. If not, see <http://www.gnu.org/licenses/>.

MODULE_BAYIMG_REGEXP_URL='https\?://\(www\.\)\?bayimg\.com/'

MODULE_BAYIMG_DOWNLOAD_OPTIONS=""
MODULE_BAYIMG_DOWNLOAD_RESUME=yes
MODULE_BAYIMG_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no
MODULE_BAYIMG_DOWNLOAD_SUCCESSIVE_INTERVAL=

MODULE_BAYIMG_UPLOAD_OPTIONS="
ADMIN_CODE,,admin-code,s=ADMIN_CODE,Admin code (used for file deletion)
TAGS,,tags,l=LIST,Provide list of tags (comma separated)"
MODULE_BAYIMG_UPLOAD_REMOTE_SUPPORT=no

MODULE_BAYIMG_DELETE_OPTIONS="
LINK_PASSWORD,p,link-password,S=PASSWORD,Admin password (mandatory)"
MODULE_BAYIMG_PROBE_OPTIONS=""

# Output a bayimg.com file download URL
# $1: cookie file (unused here)
# $2: bayimg url
# stdout: real file download link
bayimg_download() {
    local -r URL=$2
    local PAGE FILE_URL FILE_NAME

    PAGE=$(curl -L "$URL") || return

    if match '404 . Not Found' "$PAGE"; then
        return $ERR_LINK_DEAD
    fi

    FILE_URL=$(parse_attr 'toggleResize(' src <<< "$PAGE") || return

    # Filename is not always displayed
    FILE_NAME=$(parse_quiet '>Filename:' '
Filename:[[:space:]]\([^<]\+\)' <<< "$PAGE")

    echo "http:$FILE_URL"
    test -z "$FILE_NAME" || echo "$FILE_NAME"
}

# Upload a file to bayimg.com
# $1: cookie file (unused here)
# $2: input file (with full path)
# $3: remote filename
# stdout: download link + admin code
bayimg_upload() {
    local -r FILE=$2
    local -r DESTFILE=$3
    local PAGE FILE_URL

    if [ -n "$ADMIN_CODE" ]; then
        # No known restrictions (length limitation or forbidden characters)
        :
    else
        ADMIN_CODE=$(random a 8)
    fi

    PAGE=$(curl_with_log -F "tags=${TAGS[*]}" \
        -F "code=$ADMIN_CODE" \
        -F "file=@$FILE;filename=$DESTFILE" \
        'http://bayimg.com/upload') || return

    FILE_URL=$(parse_attr 'image-setting' href <<< "$PAGE") || return

    echo "http:$FILE_URL"
    echo
    echo "$ADMIN_CODE"
}

# Delete a file on bayimg (requires an admin code)
# $1: cookie file (unused here)
# $2: delete link
bayimg_delete() {
    local -r URL=$2
    local PAGE REDIR

    if [ -z "$LINK_PASSWORD" ]; then
        LINK_PASSWORD=$(prompt_for_password) || return
    fi

    PAGE=$(curl -i "$URL" -d "code=$LINK_PASSWORD") || return

    if match 'REMOVAL CODE' "$PAGE"; then
        return $ERR_LINK_PASSWORD_REQUIRED
    fi

    REDIR=$(grep_http_header_location_quiet <<< "$PAGE")
    if [ "$REDIR" = '/' ]; then
        return 0
    fi

    return $ERR_LINK_DEAD
}

# Probe a download URL
# $1: cookie file (unused here)
# $2: bayimg url
# $3: requested capability list
# stdout: 1 capability per line
bayimg_probe() {
    local -r URL=$2
    local -r REQ_IN=$3
    local PAGE REQ_OUT

    PAGE=$(curl -L "$URL") || return

    if match '404 . Not Found' "$PAGE"; then
        return $ERR_LINK_DEAD
    fi

    REQ_OUT=c

    if [[ $REQ_IN = *f* ]]; then
        parse '>Filename:' '
Filename:[[:space:]]\([^<]\+\)' <<< "$PAGE" && \
            REQ_OUT="${REQ_OUT}f"
    fi

    echo $REQ_OUT
}
bigfile.sh000066400000000000000000000335171320177646300130330ustar00rootroot00000000000000# Plowshare bigfile.to module
# Copyright (c) 2016 Plowshare team
#
# This file is part of Plowshare.
#
# Plowshare is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Plowshare is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Plowshare. If not, see <http://www.gnu.org/licenses/>.

MODULE_BIGFILE_REGEXP_URL='https\?://\(www\.\)\?\(bigfile\.to\|uploadable\.ch\)/'

MODULE_BIGFILE_DOWNLOAD_OPTIONS="
AUTH,a,auth,a=USER:PASSWORD,User account"
MODULE_BIGFILE_DOWNLOAD_RESUME=no
MODULE_BIGFILE_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no
MODULE_BIGFILE_DOWNLOAD_SUCCESSIVE_INTERVAL=

MODULE_BIGFILE_UPLOAD_OPTIONS="
AUTH,a,auth,a=USER:PASSWORD,User account
FOLDER,,folder,s=FOLDER,Folder to upload files into"
MODULE_BIGFILE_UPLOAD_REMOTE_SUPPORT=yes

MODULE_BIGFILE_LIST_OPTIONS=""
MODULE_BIGFILE_LIST_HAS_SUBFOLDERS=no

MODULE_BIGFILE_PROBE_OPTIONS=""
MODULE_BIGFILE_DELETE_OPTIONS=""

# Static function. Proceed with login
# $1: authentication
# $2: cookie file
# $3: base URL
# stdout: account type ("free" or "premium") on success.
bigfile_login() {
    local -r AUTH=$1
    local -r COOKIE_FILE=$2
    local -r BASE_URL=$3
    local CV PAGE MSG LOGIN_DATA NAME TYPE

    if CV=$(storage_get 'cookie_file'); then
        echo "$CV" >"$COOKIE_FILE"

        # Check for expired session.
        PAGE=$(curl -b "$COOKIE_FILE" "$BASE_URL/indexboard.php") || return
        if ! match '>Dashboard<' "$PAGE"; then
            storage_set 'cookie_file'
            return $ERR_EXPIRED_SESSION
        fi

        log_debug 'session (cached)'
        MSG='reused login for'
    else
        LOGIN_DATA='userName=$USER&userPassword=$PASSWORD&autoLogin=on&action__login=normalLogin'
        PAGE=$(post_login "$AUTH" "$COOKIE_FILE" "$LOGIN_DATA" \
            "$BASE_URL/login.php") || return

        if ! match 'Logging in' "$PAGE"; then
            return $ERR_LOGIN_FAILED
        fi

        storage_set 'cookie_file' "$(cat "$COOKIE_FILE")"
        PAGE=$(curl -b "$COOKIE_FILE" "$BASE_URL/indexboard.php") || return

        log_debug 'session (new)'
        MSG='logged in as'
    fi

    NAME=$(parse_quiet 'id="dashboard_box"' '>\([^<]*\)<' 4 <<< "$PAGE")

    if match '>Upgrade Now<' "$PAGE"; then
        TYPE='free'
    else
        TYPE='premium'
    fi

    log_debug "Successfully $MSG '$TYPE' member '$NAME'"
    echo $TYPE
}

# Output a bigfile file download URL and name
# $1: cookie file
# $2: bigfile url
# stdout: file download link
bigfile_download() {
    local -r COOKIE_FILE=$1
    local URL=$2
    local -r BASE_URL='https://www.bigfile.to'
    local FILE_ID ACCOUNT PAGE JSON FILE_URL WAIT_TIME

    FILE_ID=$(parse . '/file/\([^/]\+\)' <<< "$URL") || return
    URL="$BASE_URL/file/$FILE_ID"
    readonly URL

    if [ -n "$AUTH" ]; then
        ACCOUNT=$(bigfile_login "$AUTH" "$COOKIE_FILE" "$BASE_URL") || return
    fi

    # Note: Save HTTP headers to catch premium users' "direct downloads".
    PAGE=$(curl -i -c "$COOKIE_FILE" -b "$COOKIE_FILE" "$URL") || return

    if match 'File not available\|cannot be found on the server\|no longer available\|Page not found' "$PAGE"; then
        return $ERR_LINK_DEAD
    fi

    # If this is a premium download, we already have a download link.
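    # Premium users with "direct downloads" enabled are answered with an HTTP
    # redirect right away; 'curl -i' above keeps the headers so the Location
    # line can be reused as the final link. Illustrative shape only (not a
    # verbatim server reply):
    #   HTTP/1.1 302 Found
    #   Location: http://<server>.bigfile.to/dl/<token>/<filename>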
if [ "$ACCOUNT" = 'premium' ]; then MODULE_BIGFILE_DOWNLOAD_RESUME=yes # Get a download link, if this was a direct download. FILE_URL=$(grep_http_header_location_quiet <<< "$PAGE") if [ -z "$FILE_URL" ]; then PAGE=$(curl -b "$COOKIE_FILE" \ -d 'download=premium' \ -i "$URL") || return FILE_URL=$(grep_http_header_location <<< "$PAGE") || return fi echo "$FILE_URL" return 0 fi if match 'var reCAPTCHA_publickey' "$PAGE"; then local PUBKEY WCI CHALLENGE WORD ID # http://www.google.com/recaptcha/api/challenge?k= PUBKEY=$(parse 'var reCAPTCHA_publickey' "var reCAPTCHA_publickey='\([^']\+\)" <<< "$PAGE") || return fi JSON=$(curl -b "$COOKIE_FILE" \ -d 'downloadLink=wait' \ "$URL") || return WAIT_TIME=$(parse_json 'waitTime' <<< "$JSON") || return wait $WAIT_TIME || return JSON=$(curl -b "$COOKIE_FILE" \ -d 'checkDownload=check' \ "$URL") || return if match '"fail":"timeLimit"' "$JSON"; then local HOURS MINS SECS PAGE=$(curl -b "$COOKIE_FILE" \ -d 'checkDownload=showError' \ -d 'errorType=timeLimit' \ "$URL") || return HOURS=$(parse_quiet '>Please wait' \ '[^[:digit:]]\([[:digit:]]\+\) hours\?' <<< "$PAGE") MINS=$(parse_quiet '>Please wait' \ '[^[:digit:]]\([[:digit:]]\+\) minutes\?' <<< "$PAGE") SECS=$(parse_quiet '>Please wait' \ '[^[:digit:]]\([[:digit:]]\+\) seconds\?' <<< "$PAGE") log_error 'Download limit reached.' # Note: Always use decimal base instead of octal if there are leading zeros. echo $(( (( 10#$HOURS * 60 ) + 10#$MINS ) * 60 + 10#$SECS )) return $ERR_LINK_TEMP_UNAVAILABLE elif ! match '"success":"showCaptcha"' "$JSON"; then log_error "Unexpected response: $JSON" return $ERR_FATAL fi if [ -n "$PUBKEY" ]; then WCI=$(recaptcha_process $PUBKEY) || return { read WORD; read CHALLENGE; read ID; } <<<"$WCI" JSON=$(curl -b "$COOKIE_FILE" \ -d "recaptcha_challenge_field=$CHALLENGE" \ -d "recaptcha_response_field=$WORD" \ -d "recaptcha_shortencode_field=$FILE_ID" \ "$BASE_URL/checkReCaptcha.php") || return if ! match '"success":1' "$JSON"; then captcha_nack $ID log_error 'Wrong captcha' return $ERR_CAPTCHA fi captcha_ack $ID log_debug 'Correct captcha' fi PAGE=$(curl -b "$COOKIE_FILE" \ -d 'downloadLink=show' \ "$URL") || return PAGE=$(curl -b "$COOKIE_FILE" \ -d 'download=normal' \ -i "$URL") || return grep_http_header_location <<< "$PAGE" || return } # Check if specified folder name is valid. # $1: folder name selected by user # $2: cookie file (logged into account) # $3: base url # stdout: folder ID bigfile_check_folder() { local -r NAME=$1 local -r COOKIE_FILE=$2 local -r BASE_URL=$3 local JSON FOLDERS FOLDERS_N FOLDER_ID log_debug 'Getting folder data' JSON=$(curl -b "$COOKIE_FILE" \ -d 'current_page=1' \ -d 'extra=folderPanel' \ "$BASE_URL/file-manager-expand-folder.php") || return FOLDERS=$(replace_all '{', $'\n{' <<< "$JSON") || return FOLDERS=$(replace_all '}', $'}\n' <<< "$FOLDERS") || return FOLDERS_N=$(parse_all_quiet '"folderName":"' '"folderName":"\([^"]\+\)' <<< "$FOLDERS") if ! match "^$NAME$" "$FOLDERS_N"; then log_debug "Creating folder: '$NAME'" JSON=$(curl -b "$COOKIE_FILE" \ -d "newFolderName=$NAME" \ -d 'createFolderDest=0' \ "$BASE_URL/file-manager-action.php") || return if ! match '"success":true' "$JSON"; then log_error 'Failed to create folder.' 
return $ERR_FATAL fi JSON=$(curl -b "$COOKIE_FILE" \ -d 'current_page=1' \ -d 'extra=folderPanel' \ "$BASE_URL/file-manager-expand-folder.php") || return FOLDERS=$(replace_all '{', $'\n{' <<< "$JSON") || return FOLDERS=$(replace_all '}', $'}\n' <<< "$FOLDERS") || return fi FOLDER_ID=$(parse "\"folderName\":\"$NAME\"" '"folderId":"\([^"]\+\)' <<< "$FOLDERS") || return log_debug "Folder ID: '$FOLDER_ID'" echo "$FOLDER_ID" } # Upload a file to bigfile # $1: cookie file # $2: file path or remote url # $3: remote filename # stdout: download link + delete link bigfile_upload() { local -r COOKIE_FILE=$1 local -r FILE=$2 local -r DESTFILE=$3 local -r BASE_URL='https://www.bigfile.to' local ACCOUNT PAGE JSON UPLOAD_URL FILE_ID FILE_NAME DEL_CODE # Sanity checks if [ -z "$AUTH" ]; then if [ -n "$FOLDER" ]; then log_error 'You must be registered to use folders.' return $ERR_LINK_NEED_PERMISSIONS elif match_remote_url "$FILE"; then log_error 'You must be registered to do remote uploads.' return $ERR_LINK_NEED_PERMISSIONS fi fi if match_remote_url "$FILE"; then if [ -n "$FOLDER" ]; then log_error 'You cannot choose folder for remote link.' return $ERR_LINK_NEED_PERMISSIONS fi fi if [ -n "$AUTH" ]; then ACCOUNT=$(bigfile_login "$AUTH" "$COOKIE_FILE" "$BASE_URL") || return fi if [ -n "$FOLDER" ]; then FOLDER_ID=$(bigfile_check_folder "$FOLDER" "$COOKIE_FILE" "$BASE_URL") || return fi PAGE=$(curl -c "$COOKIE_FILE" -b "$COOKIE_FILE" \ "$BASE_URL/index.php") || return if ! match_remote_url "$FILE"; then local MAX_SIZE SZ SZ=$(get_filesize "$FILE") if [ "$ACCOUNT" = 'premium' ]; then MAX_SIZE='5368709120' # 5 GiB else MAX_SIZE='2147483648' # 2 GiB fi log_debug "Max size: $MAX_SIZE" if [ "$SZ" -gt "$MAX_SIZE" ]; then log_debug "File is bigger than $MAX_SIZE." return $ERR_SIZE_LIMIT_EXCEEDED fi fi # Upload remote file if match_remote_url "$FILE"; then if ! match '^https\?://' "$FILE" && ! match '^ftp://' "$FILE"; then log_error 'Unsupported protocol for remote upload.' return $ERR_BAD_COMMAND_LINE fi PAGE=$(curl -b "$COOKIE_FILE" \ -d "urls=$FILE" \ -d 'remoteUploadFormType=web' \ -d 'showPage=remoteUploadFormWeb.tpl' \ "$BASE_URL/uploadremote.php") || return if ! match 'Upload Successful' "$PAGE"; then log_error 'Remote upload failed.' return $ERR_FATAL fi log_error 'Once remote upload completed, check your account for link.' return $ERR_ASYNC_REQUEST # Upload local file else UPLOAD_URL=$(parse 'var uploadUrl' "var uploadUrl = '\([^']\+\)" <<< "$PAGE") || return JSON=$(curl_with_log -X PUT \ -H "X-File-Name: $DESTFILE" \ -H "X-File-Size: $SZ" \ -H "Origin: $BASE_URL" \ --data-binary "@$FILE" \ "$UPLOAD_URL") || return DEL_CODE=$(parse_json 'deleteCode' <<< "$JSON") || return FILE_NAME=$(parse_json 'fileName' <<< "$JSON") || return FILE_ID=$(parse_json 'shortenCode' <<< "$JSON") || return fi if [ -n "$FOLDER" ]; then local UPLOAD_ID log_debug "Moving file to folder '$FOLDER'..." 
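        # The PUT reply above only carries the public shorten code; the
        # numeric uploadId required by the file manager is recovered from the
        # folder listing fetched below. Assumed/illustrative listing entry:
        #   {"uploadId":"123456","folderId":"0","shortenCode":"AbCdEf",...}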
# Get root folder content dorted by upload date DESC # Last uploaded file will be on top JSON=$(curl -b "$COOKIE_FILE" \ -d 'parent_folder_id=0' \ -d 'current_page=1' \ -d 'sort_field=2' \ -d 'sort_order=DESC' \ "$BASE_URL/file-manager-expand-folder.php") || return JSON=$(replace_all '{', $'\n{' <<< "$JSON") || return JSON=$(replace_all '}', $'}\n' <<< "$JSON") || return UPLOAD_ID=$(parse "$FILE_ID" '"uploadId":"\([^"]\+\)' <<< "$JSON") || return log_debug "Upload ID: '$UPLOAD_ID'" JSON=$(curl -b "$COOKIE_FILE" \ -d "moveFolderId=$UPLOAD_ID" \ -d "moveFolderDest=$FOLDER_ID" \ -d 'CurrentFolderId=0' \ "$BASE_URL/file-manager-action.php") || return if ! match '"successCount":1' "$JSON"; then log_error 'Could not move file into folder.' fi fi echo "${BASE_URL}/file/$FILE_ID/$FILE_NAME" echo "${BASE_URL}/file/$FILE_ID/delete/$DEL_CODE" } # Probe a download URL # $1: cookie file (unused here) # $2: bigfile url # $3: requested capability list # stdout: 1 capability per line bigfile_probe() { local -r URL=$2 local -r REQ_IN=$3 local PAGE FILE_NAME FILE_SIZE REQ_OUT PAGE=$(curl -L "$URL") || return if match 'File not available\|cannot be found on the server\|no longer available\|Page not found' "$PAGE"; then return $ERR_LINK_DEAD fi REQ_OUT=c if [[ $REQ_IN = *f* ]]; then parse_attr '"file_name"' 'title' <<< "$PAGE" && REQ_OUT="${REQ_OUT}f" fi if [[ $REQ_IN = *s* ]]; then FILE_SIZE=$(parse '"filename_normal"' '>(\([^)]\+\)' <<< "$PAGE") && \ translate_size "$FILE_SIZE" && REQ_OUT="${REQ_OUT}s" fi echo $REQ_OUT } # List a bigfile web folder URL # $1: folder URL # $2: recurse subfolders (null string means not selected) # stdout: list of links and file names (alternating) bigfile_list() { local -r URL=$1 local -r REC=$2 local PAGE LINKS NAMES PAGE=$(curl -L "$URL") || return if match 'File not available\|cannot be found on the server\|no longer available\|Page not found' "$PAGE"; then return $ERR_LINK_DEAD fi NAMES=$(parse_all_quiet 'filename_normal' '">\(.*\) sapk.fr # # This file is part of Plowshare. # # Plowshare is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Plowshare is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Plowshare. If not, see . 
# # Note: This module is similar to 180upload MODULE_BILLIONUPLOADS_REGEXP_URL='https\?://\(www\.\)\?[Bb]illion[Uu]ploads\.com/' MODULE_BILLIONUPLOADS_DOWNLOAD_OPTIONS="" MODULE_BILLIONUPLOADS_DOWNLOAD_RESUME=yes MODULE_BILLIONUPLOADS_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=unused MODULE_BILLIONUPLOADS_DOWNLOAD_SUCCESSIVE_INTERVAL= MODULE_BILLIONUPLOADS_UPLOAD_OPTIONS=" LINK_PASSWORD,p,link-password,S=PASSWORD,Protect a link with a password DESCRIPTION,d,description,S=DESCRIPTION,Set file description TOEMAIL,,email-to,e=EMAIL, field for notification email" MODULE_BILLIONUPLOADS_UPLOAD_REMOTE_SUPPORT=no MODULE_BILLIONUPLOADS_PROBE_OPTIONS="" # Full urldecode # $1: url encoded string # stdout: decoded string billionuploads_urldecode(){ echo -e "$(sed 's/+/ /g;s/%\(..\)/\\x\1/g;')" } # Handle anti-DDoS protection # $1: cookie file # $2: main URL # $3: (X)HTML page data # stdout: (X)HTML page data billionuploads_antiddos(){ local -r COOKIE_FILE=$1 local -r URL=$2 local PAGE=$3 local -r BASE_URL=$(basename_url "$URL") local FORM_X FORM_Y FORM_CAPTCHA FORM_HTML FORM_ACTION REDIR HEX HEX_ESC HEX_CHAR # Anti-DDoS protection handle if match 'iframe src="/_Incapsula_Resource' "$PAGE" || match 'var z="";var b="' "$PAGE"; then if match 'iframe src' "$PAGE"; then REDIR=$(parse_attr 'iframe' 'src' <<< "$PAGE") || return PAGE=$(curl -b "$COOKIEFILE" "$BASE_URL$REDIR") || return local PUBKEY WCI CHALLENGE WORD ID # http://www.google.com/recaptcha/api/challenge?k= PUBKEY=$(parse 'recaptcha.*?k=' '?k=\([[:alnum:]_-.]\+\)' <<< "$PAGE") || return WCI=$(recaptcha_process $PUBKEY) || return { read WORD; read CHALLENGE; read ID; } <<<"$WCI" FORM_X=$(random dec 1) FORM_Y=$(random dec 1) FORM_CAPTCHA="-d recaptcha_challenge_field=$CHALLENGE -d recaptcha_response_field=$WORD -d x=$FORM_X -d y=$FORM_Y" FORM_HTML=$(grep_form_by_order "$PAGE") || return FORM_ACTION=$(parse_form_action <<< "$FORM_HTML") || return PAGE=$(curl -b "$COOKIEFILE" -c "$COOKIEFILE" "$BASE_URL$FORM_ACTION" $FORM_CAPTCHA) || return elif match 'var z="";var b="' "$PAGE"; then HEX=$(parse 'var z="";var b="' 'var z="";var b="\([^"]\+\)' <<< "$PAGE") || return while read -n 2 HEX_CHAR; do HEX_ESC="$HEX_ESC\x$HEX_CHAR" done <<< "$HEX" HEX_ESC=$(echo -e "$HEX_ESC") REDIR=$(parse . 'xhr.open("GET","\([^"]\+\)' <<< "$HEX_ESC") || return PAGE=$(curl -b "$COOKIEFILE" -c "$COOKIEFILE" "$BASE_URL$REDIR") || return fi if ! match 'window\..*location\.reload(true);' "$PAGE"; then if [ -n "$ID" ]; then captcha_nack $ID log_error 'Wrong captcha.' return $ERR_CAPTCHA else return $ERR_FATAL fi fi PAGE=$(curl -L -b "$COOKIEFILE" -c "$COOKIEFILE" "$URL") || return fi if match 'iframe src="/_Incapsula_Resource' "$PAGE" || match 'var z="";var b="' "$PAGE"; then if [ -n "$ID" ]; then captcha_nack $ID log_error 'Wrong captcha.' return $ERR_CAPTCHA else return $ERR_FATAL fi fi [ -n "$ID" ] && captcha_ack $ID echo "$PAGE" return 0 } # Output a billionuploads.com file download URL and NAME # $1: cookie file # $2: billionuploads.com url # stdout: real file download link and name billionuploads_download() { local -r COOKIEFILE=$1 local -r URL=$2 local PAGE FILE_NAME FILE_URL ERR local FORM_HTML FORM_OP FORM_ID FORM_RAND FORM_DD FORM_METHOD_F FORM_METHOD_P FORM_ADD_TMP FORM_ADD CRYPT PAGE=$(curl -L -b "$COOKIEFILE" -c "$COOKIEFILE" "$URL") || return PAGE=$(billionuploads_antiddos "$COOKIEFILE" "$URL" "$PAGE") || return # File Not Found, Copyright infringement issue, file expired or deleted by its owner. if match '[Ff]ile [Nn]ot [Ff]ound' "$PAGE"; then return $ERR_LINK_DEAD fi if ! 
check_exec 'base64'; then log_error "'base64' is required but was not found in path." return $ERR_SYSTEM fi FORM_HTML=$(grep_form_by_name "$PAGE" 'F1') || return FORM_OP=$(echo "$FORM_HTML" | parse_form_input_by_name 'op') || return FORM_ID=$(echo "$FORM_HTML" | parse_form_input_by_name 'id') || return #FORM_RAND=$(echo "$FORM_HTML" | parse_form_input_by_name 'rand') || return FORM_RAND_NAME=$(parse "\$('form\[name=\"F1\"\]')" "attr('name','\([^']\+\)" <<< "$FORM_HTML") FORM_RAND=$(parse_tag 'source="self"' 'textarea' <<< "$FORM_HTML") || return FORM_DD=$(echo "$FORM_HTML" | parse_form_input_by_name 'down_direct') || return # Note: this is quiet parsing FORM_METHOD_F=$(echo "$FORM_HTML" | parse_form_input_by_name_quiet 'method_free') FORM_METHOD_P=$(echo "$FORM_HTML" | parse_form_input_by_name_quiet 'method_premium') FORM_ADD_TMP=$(echo "$PAGE" | parse "document.getElementById('.*').innerHTML=decodeURIComponent" 'decodeURIComponent("\([^"]\+\)' | billionuploads_urldecode) FORM_ADD=$(echo "$FORM_ADD_TMP" | parse_attr 'name')'='$(echo "$FORM_ADD_TMP" | parse_attr 'value') PAGE=$(curl -b "$COOKIE_FILE" \ -F "referer=" \ -F "op=$FORM_OP" \ -F "id=$FORM_ID" \ -F "$FORM_RAND_NAME=$FORM_RAND" \ -F "down_direct=$FORM_DD" \ -F "method_free=$FORM_METHOD_F" \ -F "method_premium=$FORM_METHOD_P" \ -F "$FORM_ADD" \ "$URL" | break_html_lines ) || return # Catch the error "the file is temporary unavailable". if match 'file is temporarily unavailable - please try again later' "$PAGE"; then return $ERR_LINK_TEMP_UNAVAILABLE fi #
    # Skipped countdown
    CRYPT=$(echo "$PAGE" | parse 'XXX' '[^<]*XXX\([^<]\+\)XXX[^<]*') || return
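    # CRYPT now holds the download URL, base64-encoded twice. Assuming a
    # well-formed blob, it unwraps with two plain decode passes:
    #   base64 --decode <<< "$CRYPT" | base64 --decode   # -> http://... link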
    if ! match '^[[:alnum:]=]\+$' "$CRYPT"; then
        log_error "Something wrong with encoded message."
        return $ERR_FATAL
    fi

    FILE_URL=$(echo "$CRYPT" | base64 --decode | base64 --decode)

    echo "$FILE_URL"
}

# Upload a file to billionuploads
# $1: cookie file (not used here)
# $2: input file (with full path)
# $3: remote filename
# stdout: download_url
billionuploads_upload() {
    local -r COOKIEFILE=$1
    local -r FILE=$2
    local -r DEST_FILE=$3
    local -r BASE_URL='http://billionuploads.com/'
    local -r MAX_SIZE=2147483648 # 2GiB
    local PAGE UPLOAD_ID USER_TYPE DL_URL DEL_URL
    local FORM_HTML FORM_ACTION FORM_UTYPE FORM_SESS FORM_TMP_SRV FILE_CODE STATE

    # Check for forbidden file extensions
    case ${DEST_FILE##*.} in
        php|pl|cgi|py|sh|shtml)
            log_error 'File extension is forbidden. Try renaming your file.'
            return $ERR_FATAL
            ;;
    esac

    local SZ=$(get_filesize "$FILE")
    if [ "$SZ" -gt "$MAX_SIZE" ]; then
        log_debug "file is bigger than $MAX_SIZE"
        return $ERR_SIZE_LIMIT_EXCEEDED
    fi

    PAGE=$(curl -L -b "$COOKIEFILE" -c "$COOKIEFILE" "$BASE_URL") || return
    PAGE=$(billionuploads_antiddos "$COOKIEFILE" "$BASE_URL" "$PAGE") || return

    FORM_HTML=$(grep_form_by_name "$PAGE" 'file') || return
    FORM_ACTION=$(echo "$FORM_HTML" | parse_form_action) || return
    FORM_UTYPE=$(echo "$FORM_HTML" | parse_form_input_by_name 'upload_type')
    FORM_SESS=$(echo "$FORM_HTML" | parse_form_input_by_name_quiet 'sess_id')
    FORM_TMP_SRV=$(echo "$FORM_HTML" | parse_form_input_by_name 'srv_tmp_url') || return
    log_debug "Server URL: '$FORM_TMP_SRV'"

    UPLOAD_ID=$(random dec 12)
    USER_TYPE=''

    PAGE=$(curl "${FORM_TMP_SRV}/status.html?${UPLOAD_ID}=$DEST_FILE=billionuploads.com") || return

    # Sanity check. Avoid failure after effective upload
    if match '>404 Not Found<' "$PAGE"; then
        log_error 'upstream error (404)'
        return $ERR_FATAL
    fi

    PAGE=$(curl_with_log \
        -F "upload_type=$FORM_UTYPE" \
        -F "sess_id=$FORM_SESS" \
        -F "srv_tmp_url=$FORM_TMP_SRV" \
        -F "file_0=@$FILE;filename=$DEST_FILE" \
        --form-string "file_0_descr=$DESCRIPTION" \
        -F "file_1=@/dev/null;filename=" \
        -F 'tos=1' \
        --form-string "link_rcpt=$TOEMAIL" \
        --form-string "link_pass=$LINK_PASSWORD" \
        -F 'submit_btn= Upload! ' \
        "${FORM_ACTION}${UPLOAD_ID}&js_on=1&utype=${USER_TYPE}&upload_type=$FORM_UTYPE" | \
        break_html_lines) || return

    FILE_CODE=$(echo "$PAGE" | parse 'fc-X-x-' 'fc-X-x-\([^"]\+\)')
    STATE=$(echo "$PAGE" | parse 'st-X-x-' 'st-X-x-\([^"]\+\)')

    if [ "$STATE" = 'OK' ]; then
        echo "$BASE_URL$FILE_CODE"
        return 0
    fi

    log_error "Unexpected status: $STATE"
    return $ERR_FATAL
}

# Probe a download URL
# $1: cookie file
# $2: billionuploads.com url
# $3: requested capability list
# stdout: 1 capability per line
billionuploads_probe() {
    local -r URL=$2
    local -r REQ_IN=$3
    local PAGE REQ_OUT FILE_NAME FILE_SIZE

    PAGE=$(curl -L "$URL") || return

    ! match '[Ff]ile [Nn]ot [Ff]ound' "$PAGE" || return $ERR_LINK_DEAD

    REQ_OUT=c

    # Filename can be truncated
    if [[ $REQ_IN = *f* ]]; then
        FILE_NAME=$(echo "$PAGE" | parse_quiet '>File Name:<' 'class="dofir"[^>]*>\([^<]*\)' 1)
        test "$FILE_NAME" && echo "$FILE_NAME" && REQ_OUT="${REQ_OUT}f"
    fi

    if [[ $REQ_IN = *s* ]]; then
        FILE_SIZE=$(echo "$PAGE" | parse_quiet '>File Size:<' 'class="dofir"[^>]*>\([^<]*\)' 1)
        test "$FILE_SIZE" && translate_size "$FILE_SIZE" && REQ_OUT="${REQ_OUT}s"
    fi

    echo $REQ_OUT
}
bitshare.sh000066400000000000000000000376231320177646300132360ustar00rootroot00000000000000# Plowshare bitshare.com module
# Copyright (c) 2013 Plowshare team
#
# This file is part of Plowshare.
# # Plowshare is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Plowshare is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Plowshare. If not, see . MODULE_BITSHARE_REGEXP_URL='http://\(www\.\)\?bitshare\.com/' MODULE_BITSHARE_DOWNLOAD_OPTIONS=" AUTH,a,auth,a=USER:PASSWORD,User account" MODULE_BITSHARE_DOWNLOAD_RESUME=yes MODULE_BITSHARE_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no MODULE_BITSHARE_DOWNLOAD_SUCCESSIVE_INTERVAL= MODULE_BITSHARE_UPLOAD_OPTIONS=" METHOD,,method,s=METHOD,Upload method (openapi or form, default: openapi) AUTH,a,auth,a=USER:PASSWORD,User account HASHKEY,,hashkey,s=HASHKEY,Hashkey used in openapi (override -a/--auth)" MODULE_BITSHARE_UPLOAD_REMOTE_SUPPORT=yes MODULE_BITSHARE_DELETE_OPTIONS="" MODULE_BITSHARE_PROBE_OPTIONS="" # Login to bitshare (HTML form) # $1: authentication # $2: cookie file bitshare_login() { local AUTH=$1 local COOKIE_FILE=$2 local LOGIN post_login "$AUTH" "$COOKIE_FILE" \ 'user=$USER&password=$PASSWORD&rememberlogin=&submit=Login' \ "http://bitshare.com/login.html" -b "$COOKIE_FILE" > /dev/null || return LOGIN=$(parse_cookie_quiet 'login' < "$COOKIE_FILE") if test -z "$LOGIN"; then return $ERR_LOGIN_FAILED else log_debug 'successfully logged in' fi } # Output a bitshare file download URL # $1: cookie file # $2: bitshare url # stdout: real file download link bitshare_download() { local COOKIEFILE=$1 local URL=$2 local BASE_URL='http://bitshare.com' local FILE_ID POST_URL WAIT AJAXDL DATA RESPONSE local NEED_RECAPTCHA FILE_URL FILENAME FILE_ID=$(echo "$URL" | parse_quiet 'bitshare' 'bitshare\.com/files/\([^/]\+\)/') if test -z "$FILE_ID"; then FILE_ID=$(echo "$URL" | parse 'bitshare' 'bitshare\.com/?f=\(.\+\)$') || return fi log_debug "file id=$FILE_ID" POST_URL="$BASE_URL/files-ajax/$FILE_ID/request.html" # Set website language to english (language_selection=EN) curl -c "$COOKIEFILE" -o /dev/null "$BASE_URL/?language=EN" || return # Login if test "$AUTH"; then bitshare_login "$AUTH" "$COOKIEFILE" || return fi # Add cookie entries: last_file_downloaded, trafficcontrol RESPONSE=$(curl -i -b "$COOKIEFILE" -c "$COOKIEFILE" "$URL") || return # Error - File not available ! match 'File not available' "$RESPONSE" || return $ERR_LINK_DEAD # Download limit if match "You reached your hourly traffic limit\." "$RESPONSE"; then WAIT=$(echo "$RESPONSE" | parse '' \ '\([[:digit:]]\+\) seconds\?') echo $((WAIT)) return $ERR_LINK_TEMP_UNAVAILABLE elif match "Sorry, you cant download more then [[:digit:]]\+ files\? at time\." "$RESPONSE"; then return $ERR_LINK_TEMP_UNAVAILABLE fi # Note: filename is
in an <input> value; the
tag might be truncated FILENAME=$(echo "$RESPONSE" | parse 'http://bitshare\.com/files/' \ 'value="http://bitshare\.com/files/'"$FILE_ID"'/\(.*\)\.html"') || return # Premium account direct download FILE_URL=$(echo "$RESPONSE" | grep_http_header_location_quiet) || return if [ "$FILE_URL" ]; then log_debug 'using premium direct download' echo "$FILE_URL" echo "$FILENAME" return fi # Add cookie entry: ads_download=1 curl -b "$COOKIEFILE" -c "$COOKIEFILE" -o /dev/null \ "$BASE_URL/getads.html" || return # Get ajaxdl id AJAXDL=$(echo "$RESPONSE" | parse 'var ajaxdl = ' \ 'var ajaxdl = "\([^"]\+\)";') || return # Retrieve parameters # Example: file:60:1 DATA="request=generateID&ajaxid=$AJAXDL" RESPONSE=$(curl -b "$COOKIEFILE" --referer "$URL" --data "$DATA" \ "$POST_URL") || return if match '^ERROR' "$RESPONSE"; then log_error "failed in retrieving parameters: $RESPONSE" return $ERR_LINK_TEMP_UNAVAILABLE fi WAIT=$(echo "$RESPONSE" | parse ':' ':\([[:digit:]]\+\):') || return NEED_RECAPTCHA=$(echo "$RESPONSE" | parse ':' ':\([^:]\+\)$') || return if [ "$NEED_RECAPTCHA" -eq 1 ]; then log_debug 'need recaptcha' else log_debug 'no recaptcha needed' fi wait $WAIT seconds || return # ReCaptcha if [ "$NEED_RECAPTCHA" -eq 1 ]; then local PUBKEY WCI CHALLENGE WORD ID RECAPTCHA_RESULT PUBKEY='6LdtjrwSAAAAACepq37DE6GDMp1TxvdbW5ui0rdE' WCI=$(recaptcha_process $PUBKEY) || return { read WORD; read CHALLENGE; read ID; } <<<"$WCI" DATA="request=validateCaptcha&ajaxid=$AJAXDL&recaptcha_challenge_field=$CHALLENGE&recaptcha_response_field=$WORD" RECAPTCHA_RESULT=$(curl -b "$COOKIEFILE" --referer "$URL" --data "$DATA" \ "$POST_URL") || return if ! match '^SUCCESS:\?' "$RECAPTCHA_RESULT"; then log_error 'Wrong captcha' captcha_nack $ID return $ERR_CAPTCHA fi captcha_ack $ID log_debug 'correct captcha' fi # Get file url DATA="request=getDownloadURL&ajaxid=$AJAXDL" RESPONSE=$(curl -b "$COOKIEFILE" --referer "$URL" --data "$DATA" \ "$POST_URL") || return if match 'ERROR#' "$RESPONSE"; then log_error "getting file url fail: $RESPONSE" return $ERR_LINK_TEMP_UNAVAILABLE fi FILE_URL=$(echo "$RESPONSE" | parse 'SUCCESS#' '^SUCCESS#\(.*\)$') echo "$FILE_URL" echo "$FILENAME" } # Upload a file to bitshare # Need md5sum or md5 when using the openapi method # $1: cookie file # $2: file path or remote url # $3: remote filename # stdout: bitshare download link and delete link bitshare_upload() { if [ -z "$METHOD" -o "$METHOD" = 'openapi' ]; then if [ -n "$HASHKEY" ]; then [ -z "$AUTH" ] || \ log_error 'Both --hashkey & --auth_free are defined. Taking hashkey.' elif [ -n "$AUTH" ]; then # Login to openapi local USER PASSWORD PASSWORD_HASH RESPONSE HASHKEY split_auth "$AUTH" USER PASSWORD || return PASSWORD_HASH=$(md5 "$PASSWORD") || return RESPONSE=$(curl --form-string "user=$USER" \ --form-string "password=$PASSWORD_HASH" \ 'http://bitshare.com/api/openapi/login.php') || return if ! match '^SUCCESS:' "$RESPONSE"; then return $ERR_LOGIN_FAILED fi HASHKEY="${RESPONSE:8}" log_debug "successful login to openapi as $USER member, hashkey: $HASHKEY" fi bitshare_upload_openapi "$HASHKEY" "$2" "$3" || return elif [ "$METHOD" = form ]; then if match_remote_url "$2"; then log_error 'Remote upload is not supported with this method. Use openapi method.' 
return $ERR_FATAL fi bitshare_upload_form "$AUTH" "$1" "$2" "$3" || return else log_error 'Unknown method (check --method parameter)' return $ERR_FATAL fi } # Upload a file to bitshare using openapi # Official API: http://bitshare.com/openAPI.html # $1: hashkey # $2: file path or remote url # $3: remote filename bitshare_upload_openapi() { local HASHKEY=$1 local FILE=$2 local REMOTE_FILENAME=$3 local UPLOAD_URL='http://bitshare.com/api/openapi/upload.php' local RESPONSE MAX_SIZE SIZE FILESERVER_URL DOWNLOAD_URL DELETE_URL if match_remote_url "$FILE"; then if [ -z "$HASHKEY" ]; then log_error 'Remote upload requires an account' return $ERR_LINK_NEED_PERMISSIONS fi # Remote url upload local REMOTE_UPLOAD_KEY RESPONSE=$(curl --form-string 'action=addRemoteUpload' \ -F "hashkey=$HASHKEY" \ -F "url=$FILE" \ "$UPLOAD_URL") || return if ! match '^SUCCESS:' "$RESPONSE"; then log_error "Failed in adding url: $RESPONSE" return $ERR_FATAL fi REMOTE_UPLOAD_KEY="${RESPONSE:8}" log_debug "remote upload key: $REMOTE_UPLOAD_KEY" while :; do wait 60 || return RESPONSE=$(curl --form-string 'action=remoteUploadStatus' \ -F "hashkey=$HASHKEY" \ -F "key=$REMOTE_UPLOAD_KEY" \ "$UPLOAD_URL") || return if ! match '^SUCCESS:' "$RESPONSE"; then log_error "Failed in retrieving upload status: $RESPONSE" break fi RESPONSE=${RESPONSE:8} if match '^Finished#' "$RESPONSE"; then local FILE_URL=${RESPONSE:9} # Do we need to rename file ? if [ "$REMOTE_FILENAME" != dummy ]; then local RESPONSE2 FILEID_INT FILEID_URL UPLOAD_URL='http://bitshare.com/api/openapi/filestructure.php' RESPONSE2=$(curl --form-string 'action=getfiles' \ --form-string 'mainfolder=0' \ -F "hashkey=$HASHKEY" \ "$UPLOAD_URL") || return FILEID_URL=$(echo "$FILE_URL" | parse . '/files/\([^/]\+\)') FILEID_INT=$(echo "$RESPONSE2" | parse_quiet "$FILEID_URL" '^\([^#]\+\)') if [ -n "$FILEID_INT" ]; then RESPONSE2=$(curl --form-string 'action=renamefile' \ -F "hashkey=$HASHKEY" \ -F "name=$REMOTE_FILENAME" \ -F "file=$FILEID_INT" \ "$UPLOAD_URL") || return if ! match '^SUCCESS:' "$RESPONSE2"; then log_error "Failed to rename file: $RESPONSE2" fi else log_debug "can't find file id, cannot rename" fi fi echo "$FILE_URL" return 0 elif match '^Failed#' "$RESPONSE"; then log_error "Remote download failed: $RESPONSE" break else # Pending, Processing, Downloading log_debug "status: ${RESPONSE/\#/: }" fi done return $ERR_FATAL fi # Get max file size # RESPONSE=SUCCESS:[max. filesize]#[max. entries] RESPONSE=$(curl --form-string 'action=maxFileSize' \ -F "hashkey=$HASHKEY" \ "$UPLOAD_URL") || return if ! match '^SUCCESS:' "$RESPONSE"; then log_error "Failed in getting max file size: $RESPONSE" return $ERR_FATAL fi RESPONSE=${RESPONSE:8} MAX_SIZE=${RESPONSE%%#*} SIZE=$(get_filesize "$FILE") if [ $SIZE -gt "$MAX_SIZE" ]; then log_debug "file is bigger than $MAX_SIZE" return $ERR_SIZE_LIMIT_EXCEEDED fi # Get fileserver url # RESPONSE=SUCCESS:[fileserver url] RESPONSE=$(curl --form-string 'action=getFileserver' \ "$UPLOAD_URL") || return if ! match '^SUCCESS:' "$RESPONSE"; then log_error "Failed in getting file server url: $RESPONSE" return $ERR_FATAL fi FILESERVER_URL="${RESPONSE:8}" log_debug "file server: $FILESERVER_URL" # Upload # RESPONSE=SUCCESS:[downloadlink]#[bblink]#[htmllink]#[shortlink]#[deletelink] RESPONSE=$(curl_with_log \ -F "hashkey=$HASHKEY" \ -F "filesize=$SIZE" \ -F "file=@$FILE;filename=$REMOTE_FILENAME" \ "$FILESERVER_URL") || return if ! 
match '^SUCCESS:' "$RESPONSE"; then log_error "Failed in uploading: $RESPONSE" return $ERR_FATAL fi DOWNLOAD_URL=$(echo "$RESPONSE" | parse '#' '^SUCCESS:\([^#]\+\)') || return DELETE_URL=${RESPONSE##*#} echo "$DOWNLOAD_URL" echo "$DELETE_URL" } # Upload file to bitshare using html form # $1: authentication # $2: cookie file # $3: file path # $4: remote filename bitshare_upload_form() { local AUTH=$1 local COOKIEFILE=$2 local FILE=$3 local REMOTE_FILENAME=$4 local BASE_URL='http://bitshare.com' local HTML DOWNLOAD_URL DELETE_URL # Set website language to english (language_selection=EN) curl -c "$COOKIEFILE" -o /dev/null "$BASE_URL/?language=EN" || return # Login if test "$AUTH"; then bitshare_login "$AUTH" "$COOKIEFILE" || return fi HTML=$(curl -b "$COOKIEFILE" -c "$COOKIEFILE" "$BASE_URL") || return # Get file size local SIZE MAX_SIZE MAX_SIZE=$(echo "$HTML" | parse 'Maximum file size' \ 'Maximum file size \([[:digit:]]\+\) Mbyte') || return MAX_SIZE=$((MAX_SIZE*1048576)) SIZE=$(get_filesize "$FILE") if [ $SIZE -gt $MAX_SIZE ]; then log_debug "file is bigger than $MAX_SIZE" return $ERR_SIZE_LIMIT_EXCEEDED fi # Extract form parameters local FORM ACTION PROGRESS_KEY USERGROUP_KEY UPLOAD_IDENTIFIER RESPONSE FORM=$(grep_form_by_id "$HTML" uploadform) || return ACTION=$(echo "$FORM" | parse_form_action) || return PROGRESS_KEY=$(echo "$FORM" | parse_form_input_by_id 'progress_key') || return USERGROUP_KEY=$(echo "$FORM" | parse_form_input_by_id 'usergroup_key') || return UPLOAD_IDENTIFIER=$(echo "$FORM" | parse_form_input_by_name 'UPLOAD_IDENTIFIER') || return # Upload RESPONSE=$(curl_with_log -L --referer "$BASE_URL/" -b "$COOKIEFILE" \ --form-string APC_UPLOAD_PROGRESS="$PROGRESS_KEY" \ --form-string APC_UPLOAD_USERGROUP="$USERGROUP_KEY" \ --form-string UPLOAD_IDENTIFIER="$UPLOAD_IDENTIFIER" \ -F file[]='@/dev/null;filename=' \ -F file[]="@$FILE;filename=$REMOTE_FILENAME" \ "${ACTION}?X-Progress-ID=undefined$(random h 32)") || return DOWNLOAD_URL=$(echo "$RESPONSE" | parse \ 'Download:' 'value="\([^"]\+\)"' 1) || return DELETE_URL=$(echo "$RESPONSE" | parse \ 'Delete link:' 'value="\([^"]\+\)"' 1) || return echo "$DOWNLOAD_URL" echo "$DELETE_URL" } # Delete a file from Bitshare # $1: cookie file (unused here) # $2: bitshare (delete) link bitshare_delete() { local -r URL=$2 local PAGE PAGE=$(curl -b 'language_selection=EN' "$URL") || return match '
Wrong Link' "$PAGE" && return $ERR_LINK_DEAD

    # Note: Page tries to show captcha. It's broken, but deletion works anyway.
    if match 'Delete File?' "$PAGE"; then
        PAGE=$(curl -d 'sum' -d 'submit=Delete' "$URL") || return

        if match 'File deleted!' "$PAGE"; then
            return 0
        fi
    fi

    log_error 'Unexpected content. Site updated?'
    return $ERR_FATAL
}
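# The delete link already embeds its secret token, so no login is needed.
# Driven through plowshare's front-end it would look like (hypothetical
# delete link):
#   $ plowdel http://bitshare.com/...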
# Probe a download URL
# $1: cookie file
# $2: bitshare url
# $3: requested capability list
# stdout: 1 capability per line
bitshare_probe() {
    local -r COOKIE_FILE=$1
    local -r URL=$2
    local -r REQ_IN=$3
    local PAGE REQ_OUT FILE_NAME FILE_SIZE

    PAGE=$(curl -c "$COOKIE_FILE" -b 'language_selection=EN' "$URL") || return

    ! match 'File not available' "$PAGE" || return $ERR_LINK_DEAD

    REQ_OUT=c

    # Filename can be truncated
    if [[ $REQ_IN = *f* ]]; then
        FILE_NAME=$(echo "$PAGE" | parse_tag title)
        FILE_NAME=${FILE_NAME#Download }
        FILE_NAME=${FILE_NAME% - BitShare.com - Free File Hosting and Cloud Storage}
        test "$FILE_NAME" && echo "$FILE_NAME" && REQ_OUT="${REQ_OUT}f"
    fi

    if [[ $REQ_IN = *s* ]]; then
        FILE_SIZE=$(echo "$PAGE" | parse '<title' \
            '[[:space:]]-[[:space:]]\([[:digit:]]\+\(\.[[:digit:]]\+\)\?[[:space:]][KM]B\)yte') && \
            translate_size "$FILE_SIZE" && REQ_OUT="${REQ_OUT}s"
    fi

    echo $REQ_OUT
}
catshare.sh000066400000000000000000000132021320177646300132140ustar00rootroot00000000000000# Plowshare catshare.net module
# Copyright (c) 2016 Plowshare team
#
# This file is part of Plowshare.
#
# Plowshare is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Plowshare is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Plowshare. If not, see <http://www.gnu.org/licenses/>.

MODULE_CATSHARE_REGEXP_URL='https\?://\([[:alnum:]]\+\.\)\?catshare\.\(net\|xup\.pl\)/'

MODULE_CATSHARE_DOWNLOAD_OPTIONS="
AUTH,a,auth,a=USER:PASSWORD,User account"
MODULE_CATSHARE_DOWNLOAD_RESUME=no
MODULE_CATSHARE_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no
MODULE_CATSHARE_DOWNLOAD_SUCCESSIVE_INTERVAL=

MODULE_CATSHARE_PROBE_OPTIONS=""

# Static function. Proceed with login
# $1: authentication
# $2: cookie file
# $3: base URL
# stdout: account type ("free" or "premium") on success
catshare_login() {
    local -r AUTH=$1
    local -r COOKIE_FILE=$2
    local -r BASE_URL=$3
    local LOGIN_DATA PAGE STATUS NAME TYPE

    LOGIN_DATA='user_email=$USER&user_password=$PASSWORD&remindPassword=0'
    PAGE=$(post_login "$AUTH" "$COOKIE_FILE" "$LOGIN_DATA" \
        "$BASE_URL/login" -L) || return

    # If successful an entry is added into a cookie file: session_id
    STATUS=$(parse_cookie_quiet 'session_id' < "$COOKIE_FILE")
    [ -z "$STATUS" ] && return $ERR_LOGIN_FAILED

    NAME=$(parse_quiet '"/account"' 'i>\([^<]*\)<' <<< "$PAGE")
    TYPE=$(parse_quiet '"/account"' 'strong>\([^<]*\)<' <<< "$PAGE")

    if matchi 'premium' "$TYPE"; then
        TYPE='premium'
    else
        TYPE='free'
    fi

    log_debug "Successfully logged in as $TYPE member '$NAME'"
    echo "$TYPE"
}

# Output a catshare.net file download URL
# $1: cookie file
# $2: catshare.net url
# stdout: real file download link
catshare_download() {
    local -r COOKIE_FILE=$1
    local -r URL=$2
    local -r BASE_URL='http://catshare.net'
    local PAGE ACCOUNT WAIT_TIME FILE_URL

    if [ -n "$AUTH" ]; then
        ACCOUNT=$(catshare_login "$AUTH" "$COOKIE_FILE" "$BASE_URL") || return
    fi

    PAGE=$(curl -i -b "$COOKIE_FILE" -c "$COOKIE_FILE" "$URL") || return

    if match "Podany plik został usunięty\|<title>Error 404" "$PAGE"; then
        return $ERR_LINK_DEAD
    fi

    # If this is a premium download, we already have a download link.
    if [ "$ACCOUNT" = 'premium' ]; then
        MODULE_CATSHARE_DOWNLOAD_RESUME=yes

        # Get a download link, if this was a direct download.
        FILE_URL=$(grep_http_header_location_quiet <<< "$PAGE")
        if [ -z "$FILE_URL" ]; then
            FILE_URL=$(parse_attr '<form' 'action' <<< "$PAGE") || return
        fi

        echo "$FILE_URL"
        return 0
    fi

    WAIT_TIME=$(parse 'var count = ' 'var count = \([0-9]\+\)' <<< "$PAGE") || return
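    # Plowshare convention: printing a delay (in seconds) on stdout before
    # returning $ERR_LINK_TEMP_UNAVAILABLE tells the calling front-end how
    # long to sleep before it retries this link.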
    # Note: If we wait more than 5 minutes then we definitely reached downloads limit.
    if [[ $WAIT_TIME -gt 300 ]]; then
        log_error 'Download limit reached.'
        echo $WAIT_TIME
        return $ERR_LINK_TEMP_UNAVAILABLE
    fi

    wait $WAIT_TIME || return

    local PUBKEY WCI CHALLENGE WORD ID
    # http://www.google.com/recaptcha/api/challenge?k=
    PUBKEY=$(parse 'recaptcha.*?k=' '?k=\([[:alnum:]_.-]\+\)' <<< "$PAGE") || return
    WCI=$(recaptcha_process $PUBKEY) || return
    { read WORD; read CHALLENGE; read ID; } <<< "$WCI"

    PAGE=$(curl -b "$COOKIE_FILE" \
        -d "recaptcha_challenge_field=$CHALLENGE" \
        -d "recaptcha_response_field=$WORD" \
        "$URL") || return

    FILE_URL=$(parse_attr_quiet '<form' 'action' <<< "$PAGE")
    if [ -z "$FILE_URL" ]; then
        captcha_nack $ID
        log_error 'Wrong captcha'
        return $ERR_CAPTCHA
    fi

    captcha_ack $ID
    log_debug 'Correct captcha'

    echo "$FILE_URL"
}

# Probe a download URL
# $1: cookie file (unused here)
# $2: catshare.net url
# $3: requested capability list
# stdout: 1 capability per line
catshare_probe() {
    local -r URL=$2
    local -r REQ_IN=$3
    local PAGE FILE_SIZE REQ_OUT

    PAGE=$(curl -L "$URL") || return

    if match "Nasz serwis wykrył że Twój adres IP nie pochodzi z Polski." "$PAGE"; then
        log_error 'Free downloads are only allowed from Poland IP addresses.'
        return $ERR_LINK_NEED_PERMISSIONS
    elif match "Podany plik został usunięty\|Error 404" "$PAGE"; then
        return $ERR_LINK_DEAD
    fi

    REQ_OUT=c

    if [[ $REQ_IN = *f* ]]; then
        parse_tag 'class="pull-left"' h3 <<< "$PAGE" && REQ_OUT="${REQ_OUT}f"
    fi

    if [[ $REQ_IN = *s* ]]; then
        FILE_SIZE=$(parse_tag 'class="pull-right"' h3 <<< "$PAGE") \
            && FILE_SIZE=$(replace 'B' 'iB' <<< $FILE_SIZE) \
            && translate_size "$FILE_SIZE" && REQ_OUT="${REQ_OUT}s"
    fi

    if [[ $REQ_IN = *i* ]]; then
        parse 'property="og:url"' '.*/\([[:alnum:]]\+\)"' <<< "$PAGE" \
            && REQ_OUT="${REQ_OUT}i"
    fi

    echo $REQ_OUT
}
chomikuj.sh000066400000000000000000000340111320177646300132430ustar00rootroot00000000000000# Plowshare chomikuj.pl module
# Copyright (c) 2013 Plowshare team
#
# This file is part of Plowshare.
#
# Plowshare is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Plowshare is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Plowshare. If not, see <http://www.gnu.org/licenses/>.

MODULE_CHOMIKUJ_REGEXP_URL='http://\(www\.\)\?chomikuj\.pl/'

MODULE_CHOMIKUJ_DOWNLOAD_OPTIONS="
AUTH,a,auth,a=USER:PASSWORD,User account (used to download private files)"
MODULE_CHOMIKUJ_DOWNLOAD_RESUME=yes
MODULE_CHOMIKUJ_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no
MODULE_CHOMIKUJ_DOWNLOAD_SUCCESSIVE_INTERVAL=

MODULE_CHOMIKUJ_UPLOAD_OPTIONS="
AUTH,a,auth,a=USER:PASSWORD,User account
FOLDER,,folder,s=FOLDER,Folder to upload files into
DESCRIPTION,d,description,S=DESCRIPTION,Set file description"
MODULE_CHOMIKUJ_UPLOAD_REMOTE_SUPPORT=no

MODULE_CHOMIKUJ_LIST_OPTIONS="
LINK_PASSWORD,p,link-password,S=PASSWORD,Used in password-protected folders"
MODULE_CHOMIKUJ_LIST_HAS_SUBFOLDERS=yes

MODULE_CHOMIKUJ_PROBE_OPTIONS=""

# Static function. Proceed with login.
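# Note: a successful login yields a fresh __RequestVerificationToken and the
# account's numeric chomikId; both are required by the AJAX endpoints used
# later for folder listing, folder creation and upload.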
# $1: authentication # $2: cookie file # $3: base URL # stdout: token # user id chomikuj_login() { local -r AUTH=$1 local -r COOKIE_FILE=$2 local -r BASE_URL=$3 local PAGE VERIF_TOKEN USER_ID LOGIN_DATA LOGIN_RESULT PAGE=$(curl -c "$COOKIE_FILE" "$BASE_URL/") || return VERIF_TOKEN=$(parse_form_input_by_name '__RequestVerificationToken' <<< "$PAGE") || return LOGIN_DATA="ReturnUrl=&Login=\$USER&Password=\$PASSWORD&rememberLogin=false&__RequestVerificationToken=$VERIF_TOKEN" LOGIN_RESULT=$(post_login "$AUTH" "$COOKIE_FILE" "$LOGIN_DATA" \ "$BASE_URL/action/Login/TopBarLogin" \ -b "$COOKIE_FILE" \ -H 'X-Requested-With: XMLHttpRequest') || return if match '"IsSuccess":false' "$LOGIN_RESULT" || ! match '"Type":"Redirect"' "$LOGIN_RESULT"; then return $ERR_LOGIN_FAILED fi REDIRECT=$(parse_json 'redirectUrl' <<< "$LOGIN_RESULT") || return PAGE=$(curl -b "$COOKIE_FILE" "$BASE_URL$REDIRECT") || return VERIF_TOKEN=$(parse_form_input_by_name '__RequestVerificationToken' <<< "$PAGE") || return USER_ID=$(parse_form_input_by_name 'chomikId' <<< "$PAGE") || return echo "$VERIF_TOKEN" echo "$USER_ID" } # Check if specified folder name is valid. # Cannot be two folders with the same name in root. # $1: folder name selected by user # $2: cookie file (logged into account) # $3: base URL # $4: user data (token and user id) # stdout: folder ID chomikuj_check_folder() { local -r NAME=$1 local -r COOKIE_FILE=$2 local -r BASE_URL=$3 local -r USER_DATA=$4 local PAGE VERIF_TOKEN USER_ID FOLDERS FOLDER_ID { read VERIF_TOKEN; read USER_ID; } <<<"$USER_DATA" PAGE=$(curl -b "$COOKIE_FILE" \ -H 'X-Requested-With: XMLHttpRequest' \ -d 'FolderId=0' \ -d "ChomikId=$USER_ID" \ --data-urlencode "__RequestVerificationToken=$VERIF_TOKEN" \ "$BASE_URL/action/tree/loadtree") || return PAGE=$(replace_all '' $'\n' <<< "$PAGE") || return FOLDERS=$(parse_all_attr 'id="Ta_' 'title' <<< "$PAGE") || return if ! match "^$NAME$" "$FOLDERS"; then log_debug 'Creating folder.' PAGE=$(curl -b "$COOKIE_FILE" \ -H 'X-Requested-With: XMLHttpRequest' \ -d 'FolderId=0' \ -d "ChomikId=$USER_ID" \ -d "FolderName=$NAME" \ -d 'AdultContent=false' \ -d 'Password=' \ --data-urlencode "__RequestVerificationToken=$VERIF_TOKEN" \ "$BASE_URL/action/FolderOptions/NewFolderAction") || return if ! match '"IsSuccess":true' "$PAGE"; then log_error "Could not create folder." return $ERR_FATAL fi PAGE=$(curl -b "$COOKIE_FILE" \ -H 'X-Requested-With: XMLHttpRequest' \ -d 'FolderId=0' \ -d "ChomikId=$USER_ID" \ --data-urlencode "__RequestVerificationToken=$VERIF_TOKEN" \ "$BASE_URL/action/tree/loadtree") || return PAGE=$(replace_all '' $'\n' <<< "$PAGE") || return FOLDERS=$(parse_all_attr 'id="Ta_' 'title' <<< "$PAGE") || return if ! match "^$NAME$" "$FOLDERS"; then log_error 'Could not create folder.' return $ERR_FATAL fi fi FOLDER_ID=$(parse_attr "title=\"$NAME\" id=\"Ta_" 'rel' <<< "$PAGE") || return log_debug "Folder ID: '$FOLDER_ID'" echo "$FOLDER_ID" } # Output a chomikuj.pl file download URL # $1: cookie file # $2: chomikuj.pl url # stdout: real file download link chomikuj_download() { local -r COOKIE_FILE=$1 local -r URL=$(replace '://www.' 
'://' <<< "$2") local -r BASE_URL='http://chomikuj.pl' local PAGE VERIF_TOKEN FILE_ID FILE_URL FILENAME if [ -n "$AUTH" ]; then chomikuj_login "$AUTH" "$COOKIE_FILE" "$BASE_URL" >/dev/null || return fi PAGE=$(curl -i -c "$COOKIE_FILE" -b "$COOKIE_FILE" "$URL") || return LOCATION=$(grep_http_header_location_quiet <<< "$PAGE") if [ -n "$LOCATION" ] || \ match '404 Not Found' "$PAGE"; then return $ERR_LINK_DEAD fi PAGE=$(replace_all '\(.*\)' <<< "$PAGE") else LINKS=$(parse_all_quiet 'expanderHeader downloadAction' '\(href="[^"]\+\)' <<< "$PAGE") NAMES=$(parse_all_quiet 'expanderHeader downloadAction' '\(.*\)$' 1 <<< "$PAGE") fi PAGES_BAR=$(parse_quiet 'paginator' '
            \(.*\)' <<< "$PAGE")
            else
                LINKS=$LINKS$'\n'$(parse_all_quiet 'expanderHeader downloadAction' '\(href="[^"]\+\)' <<< "$PAGE")
                NAMES=$NAMES$'\n'$(parse_all_quiet 'expanderHeader downloadAction' '\(.*\)$' 1 <<< "$PAGE")
            fi
        done
    fi

    [ -n "$COOKIE_FILE" ] && rm -f "$COOKIE_FILE"

    LINKS=$(replace_all 'href="' "$BASE_URL" <<< "$LINKS")
    NAMES=$(replace_all '' '' <<< "$NAMES")

    list_submit "$LINKS" "$NAMES"

    # Are there any subfolders?
    if [ -n "$REC" ]; then
        local FOLDERS FOLDER

        FOLDERS=$(parse_all_quiet \
            '^[[:space:]]*[[:space:]]*$' \
            '\(href="[^"]\+\)' <<< "$PAGE") || return
        FOLDERS=$(replace_all 'href="' "$BASE_URL" <<< "$FOLDERS")

        while read FOLDER; do
            [ -z "$FOLDER" ] && continue
            log_debug "Entering sub folder: $FOLDER"
            chomikuj_list "$FOLDER" "$REC"
        done <<< "$FOLDERS"
    fi

    return 0
}
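# Typical recursive listing via plowshare's front-end (folder URL purely
# illustrative):
#   $ plowlist -R http://chomikuj.pl/SomeUser/SomeFolder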
config000066400000000000000000000151061320177646300122620ustar00rootroot00000000000000# Site list with capabilities
115 | download | | | | |
180upload | download | upload | | | probe |
1fichier | download | upload | delete | list | probe |
2shared | download | upload | delete | | probe |
4share_vn | download | upload | | list | probe |
4shared | download | upload | | list | probe |
anonfiles | download | upload | | | probe |
bayfiles | download | upload | delete | | probe |
bayimg | download | upload | delete | | probe |
bigfile | download | upload | delete | list | probe |
billionuploads | download | upload | | | probe |
bitshare | download | upload | delete | | probe |
catshare | download | | | | probe |
chomikuj | download | upload | | list | probe |
crocko | download | upload | | list | probe |
data_hu | download | upload | delete | | |
datafile | download | | | | probe |
dataport_cz | download | upload | delete | | |
depositfiles | download | upload | delete | list | probe |
directmirror | | upload | | list | |
divshare | download | upload | delete | list | probe |
dl_free_fr | download | upload | delete | | probe |
embedupload | | | | list | |
espafiles | download | | | | probe |
euroshare_eu | download | upload | delete | | probe |
exoshare | | upload | | list | |
faststore | download | | | | probe |
fboom_me | download | | | | probe |
filebin_ca | download | upload | | | probe |
filecloud | download | upload | | list | probe |
filecore | download | | | | probe |
filedais | download | | | | probe |
filefactory | download | upload | | list | probe |
filejoker | download | | | | probe |
filemonkey | | upload | | | |
fileover | download | | | | probe |
fileparadox | | upload | | | |
filepost | download | upload | delete | list | probe |
filepup_net | download | | | | probe |
filer_net | download | | | list | probe |
fileshark | download | | | | probe |
firedrive | download | | | | probe |
flashx | download | | | | probe |
freakshare | download | upload | | | probe |
fshare_vn | download | upload | | list | probe |
gamefront | download | upload | | | probe |
ge_tt | download | upload | | list | probe |
gfile_ru | download | upload | | | probe |
go4up | | upload | delete | list | |
hdstream_to | download | upload | | | probe |
hexupload | download | | | | probe |
hipfile | download | upload | delete | list | probe |
hotlink_cc | download | | | | probe |
jheberg | | upload | | list | probe |
keep2share | download | upload | | | probe |
letitbit | download | upload | delete | list | probe |
lunaticfiles | download | | | | probe |
mediafire | download | upload | | list | probe |
megashares | download | upload | delete | | probe |
mirrorcreator | | upload | | list | |
mirrorupload | | upload | | list | |
multiup_org | | upload | | list | probe |
multiupload | download | upload | | list | |
myvdrive | download | upload | | list | probe |
nakido | download | | | | probe |
netkups | download | upload | | | probe |
netload_in | download | upload | | list | probe |
nitroflare | download | | | | probe |
nowdownload_co | download | upload | | | probe |
oboom | download | upload | | list | probe |
openload | download | upload | | | probe |
pastebin | | | | list | |
prefiles | download | | | | probe |
promptfile | download | | | | probe |
rapidgator | download | upload | delete | list | probe |
rapidu | download | | | | probe |
rghost | download | upload | | | probe |
rockfile_eu | | upload | delete | | |
ryushare | download | upload | | | probe |
salefiles | download | | | | probe |
sendspace | download | upload | delete | list | probe |
sharebeast | download | upload | delete | | probe |
sharehost | download | | | | probe |
shareonline_biz | download | upload | | | probe |
sockshare | download | upload | delete | list | probe |
solidfiles | download | upload | | list | probe |
tempsend | download | upload | | | probe |
tempshare | download | | | | probe |
tezfiles | download | | | | probe |
thefilebay | download | upload | | | probe |
turbobit | download | upload | delete | list | probe |
uloz_to | download | | | | probe |
ultramegabit | download | upload | | | probe |
uplea | download | | | | probe |
upload_cd | download | | | | probe |
uploadboy | download | | | | probe |
uploading | download | upload | | | probe |
uploaded_net | download | upload | delete | list | probe |
uploadrocket | download | upload | delete | | probe |
upstore | download | upload | | | probe |
uptobox | download | upload | | | probe |
videowood_tv | | upload | | | |
vidzi_tv | | upload | | | |
vid_ag | | upload | | | |
yourvideohost | download | | | | probe |
zalaa | download | upload | | list | probe |
ziddu | download | upload | | | probe |
zippyshare | download | upload | | list | probe |
crocko.sh000066400000000000000000000307261320177646300127130ustar00rootroot00000000000000# Plowshare crocko.com module
# Copyright (c) 2013 Plowshare team
#
# This file is part of Plowshare.
#
# Plowshare is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Plowshare is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Plowshare. If not, see <http://www.gnu.org/licenses/>.

MODULE_CROCKO_REGEXP_URL='https\?://\(www\.\)\?crocko\.com/'

MODULE_CROCKO_DOWNLOAD_OPTIONS="
AUTH,a,auth,a=USER:PASSWORD,User account
API,,api,,Use API to download file
API_KEY,,api-key,s=API_KEY,Provide API key to use instead of login:pass. Can be used without --api option."
MODULE_CROCKO_DOWNLOAD_RESUME=no
MODULE_CROCKO_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no
MODULE_CROCKO_DOWNLOAD_SUCCESSIVE_INTERVAL=

MODULE_CROCKO_UPLOAD_OPTIONS="
AUTH,a,auth,a=USER:PASSWORD,User account
FOLDER,,folder,s=FOLDER,Folder to upload files into
PREMIUM,,premium,,Make file inaccessible to non-premium users"
MODULE_CROCKO_UPLOAD_REMOTE_SUPPORT=no

MODULE_CROCKO_LIST_OPTIONS=""
MODULE_CROCKO_LIST_HAS_SUBFOLDERS=yes

MODULE_CROCKO_PROBE_OPTIONS=""

# Static function. Proceed with login.
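# Note: the site marks a successful login with a 'logacc' cookie; its
# absence is mapped to $ERR_LOGIN_FAILED below.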
# $1: authentication
# $2: cookie file
# $3: base URL
crocko_login() {
    local -r AUTH=$1
    local -r COOKIE_FILE=$2
    local -r BASE_URL=$3
    local PAGE LOGIN_DATA LOGIN_RESULT LOGIN_FLAG

    LOGIN_DATA='success_llocation=&login=$USER&password=$PASSWORD&remember=1'
    LOGIN_RESULT=$(post_login "$AUTH" "$COOKIE_FILE" "$LOGIN_DATA" \
        "$BASE_URL/accounts/login") || return

    LOGIN_FLAG=$(parse_cookie_quiet 'logacc' < "$COOKIE_FILE")
    if [ "$LOGIN_FLAG" != '1' ]; then
        return $ERR_LOGIN_FAILED
    fi
}

# Get API key
# $1: authentication
# $2: base URL
crocko_login_api() {
    local -r AUTH=$1
    local -r BASE_URL=$2
    local PAGE USER PASSWORD ERROR_CODE MESSAGE

    split_auth "$AUTH" USER PASSWORD || return

    PAGE=$(curl \
        -H 'Accept: application/atom+xml' \
        -d "login=$USER" \
        -d "password=$PASSWORD" \
        "$BASE_URL/apikeys") || return

    ERROR_CODE=$(parse_tag '^<' 'title' <<< "$PAGE") || return
    MESSAGE=$(parse_tag 'content' <<< "$PAGE") || return

    if [ "$ERROR_CODE" = 'apikey' ]; then
        echo "$MESSAGE"
        return 0
    elif [ "$ERROR_CODE" = 'errorWrongCredentials' ]; then
        return $ERR_LOGIN_FAILED
    else
        log_error "Unknown remote error $ERROR_CODE: $MESSAGE"
        return $ERR_FATAL
    fi
}

# Output a crocko.com file download URL
# $1: cookie file
# $2: crocko.com url
# stdout: real file download link
crocko_download() {
    local -r COOKIE_FILE=$1
    local -r URL=$2
    local -r BASE_URL=$(basename_url "$URL")
    local PAGE WAIT_TIME CAPTCHA_SCRIPT FORM_CAPTCHA FILE_URL FILE_ID FILE_NAME

    if [ -n "$API" ] && [ -z "$AUTH" -a -z "$API_KEY" ]; then
        log_error 'You must provide -a login:pass or --api-key for API.'
        return $ERR_BAD_COMMAND_LINE
    fi

    if [ -n "$AUTH" -o -n "$API_KEY" ]; then
        local FILE_CODE ERROR_CODE MESSAGE

        FILE_CODE=$(parse . 'crocko.com/\([^/]\+\)' <<< "$URL") || return

        if [ -z "$API_KEY" ]; then
            if [ -z "$API" ]; then
                log_error 'Use --api option or provide --api-key to use account.'
                return $ERR_BAD_COMMAND_LINE
            fi

            API_KEY=$(crocko_login_api "$AUTH" 'http://api.crocko.com') || return
        fi

        PAGE=$(curl \
            -H "Accept: application/atom+xml" \
            -H "Authorization: $API_KEY" \
            "http://api.crocko.com/files/$FILE_CODE;DirectLink") || return

        if match 'Wrong apikey' "$PAGE"; then
            log_error 'Wrong API key.'
            return $ERR_LOGIN_FAILED
        fi

        ERROR_CODE=$(parse_tag_quiet '^<' 'title' <<< "$PAGE")
        MESSAGE=$(parse_tag_quiet 'content' <<< "$PAGE") || return

        if [ -z "$ERROR_CODE" ]; then
            FILE_NAME=$(parse_tag_all 'title' <<< "$PAGE") || return
            FILE_NAME=$(last_line <<< "$FILE_NAME")
            FILE_URL=$(parse_attr 'link' 'href' <<< "$PAGE") || return

            MODULE_CROCKO_DOWNLOAD_RESUME=yes

            echo "$FILE_URL"
            echo "$FILE_NAME"
            return 0
        fi

        if [ "$ERROR_CODE" = 'errorPermissionDenied' ]; then
            log_error 'Your account is not premium.'
            return $ERR_LINK_NEED_PERMISSIONS
        elif [ "$ERROR_CODE" = 'errorFileNotFound' ] || \
            [ "$ERROR_CODE" = 'errorFileInNoDownloadedStatus' ]; then
            return $ERR_LINK_DEAD
        else
            log_error "Unknown remote error $ERROR_CODE: $MESSAGE"
            return $ERR_FATAL
        fi
    fi

    PAGE=$(curl -c "$COOKIE_FILE" -b 'language=en' "$URL") || return

    if match 'Crocko.com 404' "$PAGE" || \
        match 'File not found' "$PAGE"; then
        return $ERR_LINK_DEAD
    elif match 'You need Premium membership to download this file' "$PAGE"; then
        return $ERR_LINK_NEED_PERMISSIONS
    elif match 'There is another download in progress from your IP' "$PAGE"; then
        log_error 'There is another download in progress from your IP.'
        return $ERR_LINK_TEMP_UNAVAILABLE
    fi

    if !
match 'Recaptcha.create("' "$PAGE"; then WAIT_TIME=$(parse "w='" "w='\([0-9]\+\)" <<< "$PAGE") || return CAPTCHA_SCRIPT=$(parse "u='" "u='\([^']\+\)" <<< "$PAGE") || return if (( $WAIT_TIME > 300 )); then echo "$WAIT_TIME" return $ERR_LINK_TEMP_UNAVAILABLE fi wait $WAIT_TIME || return PAGE=$(curl -b "$COOKIE_FILE" -b 'language=en' "$BASE_URL$CAPTCHA_SCRIPT") || return fi if match 'There is another download in progress from your IP' "$PAGE"; then log_error 'There is another download in progress from your IP.' return $ERR_LINK_TEMP_UNAVAILABLE fi FILE_URL=$(parse_attr 'file_contents' 'action' <<< "$PAGE") || return FILE_ID=$(parse_form_input_by_name 'id' <<< "$PAGE") || return local PUBKEY WCI CHALLENGE WORD ID PUBKEY=$(parse 'Recaptcha.create("' 'Recaptcha.create("\([^"]\+\)' <<< "$PAGE") || return WCI=$(recaptcha_process $PUBKEY) || return { read WORD; read CHALLENGE; read ID; } <<<"$WCI" FORM_CAPTCHA="-d recaptcha_challenge_field=$CHALLENGE -d recaptcha_response_field=$WORD" # Temporary file & HTTP headers local TMP_FILE TMP_FILE_H TMP_FILE=$(create_tempfile '.crocko') || return TMP_FILE_H=$(create_tempfile '.crocko_h') || return # Need to download now, no other way to check captcha curl_with_log \ -D "$TMP_FILE_H" \ -o "$TMP_FILE" \ -b "$COOKIE_FILE" \ $FORM_CAPTCHA \ -d "id=$FILE_ID" \ "$FILE_URL" || return if match "text/html" "$(grep_http_header_content_type < "$TMP_FILE_H")"; then rm -f "$TMP_FILE_H" "$TMP_FILE" captcha_nack $ID log_error 'Wrong captcha' return $ERR_CAPTCHA fi captcha_ack $ID log_debug 'Correct captcha' FILE_NAME=$(grep_http_header_content_disposition < "$TMP_FILE_H") || return rm -f "$TMP_FILE_H" echo "file://$TMP_FILE" echo "$FILE_NAME" } # Upload a file to crocko.com # $1: cookie file # $2: input file (with full path) # $3: remote filename # stdout: download link crocko_upload() { local -r COOKIE_FILE=$1 local -r FILE=$2 local -r DEST_FILE=$3 local -r BASE_URL='http://www.crocko.com' local PAGE SESSION_ID FILE_URL FILE_DEL_URL FILE_ID if [ -z "$AUTH" ]; then if [ -n "$FOLDER" ]; then log_error 'You must be registered to use folders.' return $ERR_LINK_NEED_PERMISSIONS elif [ -n "$PREMIUM" ]; then log_error 'You must be registered to set premium only flag.' return $ERR_LINK_NEED_PERMISSIONS fi fi if [ -n "$AUTH" ]; then crocko_login "$AUTH" "$COOKIE_FILE" "$BASE_URL" || return SESSION_ID=$(parse_cookie 'PHPSESSID' < "$COOKIE_FILE") || return fi PAGE=$(curl_with_log \ -F "Filename=$DEST_FILE" \ -F "PHPSESSID=$SESSION_ID" \ -F 'Upload=Submit Query' \ -F "Filedata=@$FILE;filename=$DEST_FILE" \ 'http://wwwupload.crocko.com/accounts/upload_backend/perform/ajax') || return if match 'You exceed upload limit for Free account' "$PAGE"; then log_error 'You exceed upload limit for Free account.' return $ERR_FATAL fi FILE_URL=$(parse_attr 'input' 'value' <<< "$PAGE") || return #FILE_DEL_URL=$(parse_tag 'class="del"' 'a' <<< "$PAGE") || return if [ -n "$AUTH" ] && [ -n "$FOLDER" -o -n "$PREMIUM" ]; then FILE_ID=$(parse_quiet 'createFolder(' 'createFolder(\([0-9]\+\)' <<< "$PAGE") [ -z "$FILE_ID" ] && log_error 'Could not get folder ID.' fi if [ -n "$FILE_ID" -a -n "$FOLDER" ]; then local FOLDERS FOLDER_ID FOLDERS=$(parse_all_tag_quiet 'option' <<< "$PAGE") FOLDERS=$(delete_last_line <<< "$FOLDERS") FOLDERS=$(parse_all_quiet '|---' '|--- \(.*\)$' <<< "$FOLDERS") if ! match "^$FOLDER$" "$FOLDERS"; then log_debug 'Creating folder...' 
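            # Folder creation goes through an authenticated AJAX GET to
            # /upload/change_folder/0/<file id>/<folder name>; a purely
            # numeric response is taken as the new folder's ID.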
PAGE=$(curl -b "$COOKIE_FILE" \ -H 'X-Requested-With: XMLHttpRequest' \ "$BASE_URL/upload/change_folder/0/$FILE_ID/$FOLDER") || return if match '^[0-9]\+$' "$PAGE"; then FOLDER_ID="$PAGE" fi else FOLDER_ID=$(parse_attr_quiet "' 'href' <<< "$PAGE") NAMES=$(parse_all_tag_quiet '>download' 'div' <<< "$PAGE") if [ -z "$LINKS" ]; then return $ERR_LINK_DEAD fi list_submit "$LINKS" "$NAMES" if [ -n "$REC" ]; then local FOLDERS FOLDER FOLDERS=$(parse_all_attr_quiet '/f/' 'href' <<< "$PAGE") while read FOLDER; do [ -z "$FOLDER" ] && continue log_debug "Entering sub folder: $FOLDER" crocko_list "$FOLDER" "$REC" && RET=0 done <<< "$FOLDERS" fi } data_hu.sh000066400000000000000000000117501320177646300130340ustar00rootroot00000000000000# Plowshare data.hu module # Copyright (c) 2010-2012 Plowshare team # # This file is part of Plowshare. # # Plowshare is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Plowshare is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Plowshare. If not, see . MODULE_DATA_HU_REGEXP_URL='http://\(www\.\)\?data.hu/' MODULE_DATA_HU_DOWNLOAD_OPTIONS="" MODULE_DATA_HU_DOWNLOAD_RESUME=yes MODULE_DATA_HU_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=unused MODULE_DATA_HU_DOWNLOAD_SUCCESSIVE_INTERVAL= MODULE_DATA_HU_UPLOAD_OPTIONS=" AUTH_FREE,b,auth-free,a=EMAIL:PASSWORD,Free account (mandatory)" MODULE_DATA_HU_DELETE_OPTIONS="" # Static function. Proceed with login # $1: authentication # $2: cookie file # $3: base URL data_hu_login() { local -r AUTH_FREE=$1 local -r COOKIE_FILE=$2 local -r BASE_URL=$3 local RND LOGIN_DATA JSON ERR USER RND=$(random h 32) || return LOGIN_DATA="act=dologin&login_passfield=login_$RND&target=%2Findex.php&t=&id=&data=&url_for_login=%2Findex.php%3Fisl%3D1&need_redirect=1&username=\$USER&login_$RND=\$PASSWORD" JSON=$(post_login "$AUTH_FREE" "$COOKIE_FILE" "$LOGIN_DATA" \ "$BASE_URL/login.php" -H 'X-Requested-With: XMLHttpRequest') || return ERR=$(echo "$JSON" | parse_json 'error') || return if [ "$ERR" != 0 ]; then ERR=$(echo "$JSON" | parse_json 'message') || return match 'Sikeres bel\u00e9p\u00e9s!' 
"$ERR" && return $ERR_LOGIN_FAILED log_error "Remote error: $ERR" return $ERR_FATAL fi split_auth "$AUTH_FREE" USER || return log_debug "Successfully logged in as member '$USER'" } # Output a data_hu file download URL # $1: cookie file # $2: data.hu url # stdout: real file download link # file name data_hu_download() { local -r COOKIE_FILE=$1 local -r URL=$2 local -r BASE_URL='http://data.hu' local PAGE if [ -n "$AUTH_FREE" ]; then data_hu_login "$AUTH_FREE" "$COOKIE_FILE" "$BASE_URL" || return fi PAGE=$(curl -b "$COOKIE_FILE" -L "$URL") || return match "/missing.php" "$PAGE" && return $ERR_LINK_DEAD # Extract + output download link and file name echo "$PAGE" | parse_attr 'download_box_button' 'href' || return echo "$PAGE" | parse_tag 'download_filename' 'div' || return } # Upload a file to Data.hu # $1: cookie file # $2: input file (with full path) # $3: remote filename # stdout: data.hu download link data_hu_upload() { local -r COOKIE_FILE=$1 local -r FILE=$2 local -r DEST_FILE=$3 local -r BASE_URL='http://data.hu' local PAGE UP_URL FORM SIZE MAX_SIZE MID SID [ -n "$AUTH_FREE" ] || return $ERR_LINK_NEED_PERMISSIONS data_hu_login "$AUTH_FREE" "$COOKIE_FILE" "$BASE_URL" || return PAGE=$(curl -b "$COOKIE_FILE" "$BASE_URL/index.php?isl=1") || return UP_URL=$(echo "$PAGE" | \ parse_attr '