/dev/null || return
PAGE=$(curl -b "$COOKIE_FILE" -b 'lang=english' "$BASE_URL") || return
MAX_SIZE=$(parse 'Up to ' 'to \([[:digit:]]\+\) Mb' <<< "$PAGE") || return
readonly MAX_SIZE=$(( MAX_SIZE * 1048576 )) # convert MiB to B
SIZE=$(get_filesize "$FILE") || return
if [ "$SIZE" -gt "$MAX_SIZE" ]; then
log_debug "file is bigger than $MAX_SIZE"
return $ERR_SIZE_LIMIT_EXCEEDED
fi
FORM_HTML=$(grep_form_by_name "$PAGE" 'file') || return
FORM_ACTION=$(parse_form_action <<< "$FORM_HTML") || return
FORM_UTYPE=$(parse_form_input_by_name 'upload_type' <<< "$FORM_HTML") || return
FORM_SESS=$(parse_form_input_by_name_quiet 'sess_id' <<< "$FORM_HTML")
FORM_TMP_SRV=$(parse_form_input_by_name 'srv_tmp_url' <<< "$FORM_HTML") || return
log_debug "Server URL: '$FORM_TMP_SRV'"
UPLOAD_ID=$(random dec 12)
PAGE=$(curl "${FORM_TMP_SRV}/status.html?${UPLOAD_ID}=$DEST_FILE=180upload.com") || return
# Sanity check. Avoid failure after effective upload
if match '>404 Not Found<' "$PAGE"; then
log_error 'upstream error (404)'
return $ERR_FATAL
fi
PAGE=$(curl_with_log --include -b "$COOKIE_FILE" \
-F "upload_type=$FORM_UTYPE" -F "sess_id=$FORM_SESS" \
-F "srv_tmp_url=$FORM_TMP_SRV" -F "file_1=@$FILE;filename=$DEST_FILE" \
--form-string "file_1_descr=$DESCRIPTION" \
--form-string "link_rcpt=$TOEMAIL" \
-F 'tos=1' -F 'submit_btn= Upload! ' \
"${FORM_ACTION}${UPLOAD_ID}") || return
STATUS_URL=$(grep_http_header_location <<< "$PAGE") || return
PAGE=$(curl -b "$COOKIE_FILE" -b 'lang=english' $STATUS_URL) || return
# Parse and output download and delete link
parse 'Download Link' '>\(http[^<]\+\)<' 1 <<< "$PAGE" || return
parse 'Delete Link' '>\(http[^<]\+\)<' 1 <<< "$PAGE" || return
}
# Probe a download URL
# $1: cookie file (unused here)
# $2: 180upload url
# $3: requested capability list
# stdout: 1 capability per line
180upload_probe() {
    local -r URL=$2
    local -r REQ_IN=$3
    local HTML CAPS SIZE

    HTML=$(curl -L -b 'lang=english' "$URL") || return

    # Dead-link check. Note: the pattern deliberately contains literal
    # newlines (kept byte-for-byte from the page fragment being matched).
    match '
File Not Found
' "$HTML" && return $ERR_LINK_DEAD

    # 'c' (check link) capability is always available once the page loads
    CAPS=c

    # Note: all info parsed from HTML comments on the page
    # 'f' capability: file name
    if [[ $REQ_IN = *f* ]]; then
        parse_tag 'center nowrap' 'b' <<< "$HTML" && CAPS=${CAPS}f
    fi

    # 's' capability: file size in bytes, printed as "(N bytes)" on the page
    if [[ $REQ_IN = *s* ]]; then
        if SIZE=$(parse_tag 'Size:' 'small' <<< "$HTML"); then
            SIZE=${SIZE#(}
            SIZE=${SIZE% bytes)}
            echo "$SIZE"
            CAPS=${CAPS}s
        fi
    fi

    echo $CAPS
}
1fichier.sh 0000664 0000000 0000000 00000030635 12651225562 0013121 0 ustar 00root root 0000000 0000000 # Plowshare 1fichier.com module
# Copyright (c) 2011 halfman
# Copyright (c) 2012-2015 Plowshare team
#
# This file is part of Plowshare.
#
# Plowshare is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Plowshare is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Plowshare. If not, see .
# URL pattern: 1fichier.com plus its many alias/mirror domains, all of
# which resolve to the same service.
MODULE_1FICHIER_REGEXP_URL='https\?://\(.*\.\)\?\(1fichier\.\(com\|net\|org\|fr\)\|alterupload\.com\|cjoint\.\(net\|org\)\|desfichiers\.\(com\|net\|org\|fr\)\|dfichiers\.\(com\|net\|org\|fr\)\|megadl\.fr\|mesfichiers\.\(net\|org\)\|piecejointe\.\(net\|org\)\|pjointe\.\(com\|net\|org\|fr\)\|tenvoi\.\(com\|net\|org\)\|dl4free\.com\)'
# Command-line options accepted by the download function
# (one per line: VARIABLE,short,long,type,help).
MODULE_1FICHIER_DOWNLOAD_OPTIONS="
AUTH,a,auth,a=USER:PASSWORD,Premium account
LINK_PASSWORD,p,link-password,S=PASSWORD,Used in password-protected files
RESTRICT,,restrictip,,Restrict login session to my IP address"
# Download behavior flags read by the plowdown core.
MODULE_1FICHIER_DOWNLOAD_RESUME=yes
MODULE_1FICHIER_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no
# Empty: no mandatory delay enforced between successive downloads.
MODULE_1FICHIER_DOWNLOAD_SUCCESSIVE_INTERVAL=
# Command-line options accepted by the upload function.
# NOTE(review): the TOEMAIL help text below looks truncated (likely a
# stripped '<To>' tag from HTML extraction) - confirm against upstream.
MODULE_1FICHIER_UPLOAD_OPTIONS="
AUTH,a,auth,a=USER:PASSWORD,User account
LINK_PASSWORD,p,link-password,S=PASSWORD,Protect a link with a password
MESSAGE,d,message,S=MESSAGE,Set file message (is send with notification email)
DOMAIN,,domain,N=ID,You can set domain ID to upload (ID can be found at http://www.1fichier.com/en/api/web.html)
TOEMAIL,,email-to,e=EMAIL, field for notification email
RESTRICT,,restrictip,,Restrict login session to my IP address"
# Remote (URL-based) upload is not supported by this module.
MODULE_1FICHIER_UPLOAD_REMOTE_SUPPORT=no
# List/delete/probe take no extra options; folder listing is flat.
MODULE_1FICHIER_LIST_OPTIONS=""
MODULE_1FICHIER_LIST_HAS_SUBFOLDERS=no
MODULE_1FICHIER_DELETE_OPTIONS=""
MODULE_1FICHIER_PROBE_OPTIONS=""
# Static function. Proceed with login
# $1: authentication string (user:password)
# $2: cookie file
# $3: base URL
# Uses global: RESTRICT (module option)
# On success the SID session cookie is present in the cookie file.
1fichier_login() {
    local -r AUTH=$1
    local -r COOKIE_FILE=$2
    local -r BASE_URL=$3
    local LOGIN_DATA LOGIN_RESULT SID

    # Login form fields:
    #   lt=on       - long session
    #   purge=on    - purge old sessions
    #   secure=on   - secure (https) session
    #   restrict=on - restrict the session to my IP address
    # BUGFIX: '&lt=on' had been corrupted to '<=on' (HTML entity '&lt'
    # decoded to '<'), which dropped the long-session parameter and
    # polluted the password field. Restored the literal '&lt=on'.
    LOGIN_DATA='mail=$USER&pass=$PASSWORD&lt=on&purge=on&secure=on&Login=Login'
    [ -z "$RESTRICT" ] || LOGIN_DATA="$LOGIN_DATA&restrict=on"

    LOGIN_RESULT=$(post_login "$AUTH" "$COOKIE_FILE" "$LOGIN_DATA" \
        "$BASE_URL/login.pl") || return

    # On success the page says: "You are logged in. This page will redirect you."
    # Treat a missing SID cookie as a failed login.
    SID=$(parse_cookie_quiet 'SID' < "$COOKIE_FILE") || return
    [ -n "$SID" ] || return $ERR_LOGIN_FAILED

    #PAGE=$(curl -b "$COOKIE_FILE" -b 'LG=en' 'https://1fichier.com/console/index.pl') || return
}
# Static function. Proper way to get file information
# $1: 1fichier url
# stdout: string (with ; as separator)
# Returns $ERR_LINK_DEAD when the API reports the link bad or missing.
1fichier_checklink() {
    local S STATUS

    S=$(curl --form-string "links[]=$1" 'https://1fichier.com/check_links.pl') || return

    # Note: Password protected links return
    # url;;;PRIVATE
    # Hoist the repeated expansion; removed unused local FID.
    STATUS=${S##*;}
    if [ "$STATUS" = 'BAD LINK' ]; then
        log_debug 'obsolete link format?'
        return $ERR_LINK_DEAD
    elif [ "$STATUS" = 'NOT FOUND' ]; then
        return $ERR_LINK_DEAD
    fi

    echo "$S"
}
# Output a 1fichier file download URL
# $1: cookie file (account only)
# $2: 1fichier url
# stdout: real file download link
#
# Note: Consecutive HTTP requests must be delayed (>10s).
# Otherwise you'll get the parallel download message.
1fichier_download() {
local -r COOKIE_FILE=$1
local URL=$(replace 'http://' 'https://' <<< "$2")
local FID PAGE FILE_URL FILE_NAME WAIT CV SESS
FID=$(parse_quiet . '://\([[:alnum:]]*\)\.' <<< "$URL")
if [ -n "$FID" ] && [ "$FID" != '1fichier' ]; then
URL="https://1fichier.com/?$FID"
fi
if CV=$(storage_get 'cookie_file'); then
echo "$CV" >"$COOKIE_FILE"
# Check for expired session
PAGE=$(curl -b "$COOKIE_FILE" -b LG=en "https://1fichier.com/console/index.pl") || return
if ! match '>[[:space:]]*\(My files\|Logout\)<' "$PAGE"; then
log_error 'Expired session, delete cache entry'
storage_set 'cookie_file'
echo 1
return $ERR_LINK_TEMP_UNAVAILABLE
fi
SESS=$(parse_cookie 'SID' < "$COOKIE_FILE")
log_debug "session (cached): '$SESS'"
elif [ -n "$AUTH" ]; then
1fichier_login "$AUTH" "$COOKIE_FILE" 'https://1fichier.com' || return
storage_set 'cookie_file' "$(cat "$COOKIE_FILE")"
SESS=$(parse_cookie 'SID' < "$COOKIE_FILE")
log_debug "session (new): '$SESS'"
fi
FILE_URL=$(curl --head -b "$COOKIE_FILE" "$URL" | \
grep_http_header_location_quiet)
PAGE=$(1fichier_checklink "$URL") || return
IFS=';' read -r _ FILE_NAME _ <<< "$PAGE"
if [ -z "$FILE_NAME" ]; then
log_error 'This must be a direct download link with password, filename will be wrong!'
fi
if [ -n "$FILE_URL" ]; then
echo "$FILE_URL"
echo "$FILE_NAME"
return 0
fi
PAGE=$(curl -b 'LG=en' "$URL") || return
# Location: http://www.1fichier.com/?c=SCAN
if match 'MOVED - TEMPORARY_REDIRECT' "$PAGE"; then
return $ERR_LINK_TEMP_UNAVAILABLE
fi
# The requested file could not be found
# The file may have been deleted by its owner.
# The requested file has been deleted following an abuse request.
if match 'The \(requested \)\?file \(could not be found\|.*been deleted\)' "$PAGE"; then
return $ERR_LINK_DEAD
fi
# Warning ! Without premium status, you can download only one file at a time
if match 'Warning ! Without premium status,' "$PAGE"; then
log_error 'No parallel download allowed.'
echo 300
return $ERR_LINK_TEMP_UNAVAILABLE
# Warning ! Without Premium, you must wait between downloads.
# You must wait 9 minutes
elif match 'Warning ! Without Premium,' "$PAGE"; then
WAIT=$(parse 'Warning ! Without' 'You must wait \([[:digit:]]\+\) minute' 1 <<< "$PAGE") || WAIT=1
echo $((WAIT * 60))
return $ERR_LINK_TEMP_UNAVAILABLE
# Please wait until the file has been scanned by our anti-virus
elif match 'Please wait until the file has been scanned' "$PAGE"; then
log_error 'File is scanned for viruses.'
return $ERR_LINK_TEMP_UNAVAILABLE
fi
# Accessing this file is protected by password.