CLFS_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-clfs.html
HV_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-hv.html
+# Test if the given file extension corresponds to a compressed archive.
+# Arg. #1: File extension (without the leading dot, e.g. "gz", "zip")
+# Return: 0 if the extension is one of the known archive types, 1 otherwise
+# (also returns 1 when called with the wrong number of arguments).
+is_extension_archive()
+{
+ if [ ${#} -ne 1 ]; then
+ echo "${FUNCNAME}(), wrong number of arguments: ${*}"
+ return 1
+ fi
+
+ local EXT=${1}
+
+ # NOTE(review): loop variable 'k' is not declared 'local' and so leaks
+ # into the caller's scope — harmless here, but worth confirming.
+ for k in "gz" "tgz" "Z" "zip" "xz" "bz2"; do
+ if [ "x${EXT}" = "x${k}" ]; then
+ return 0
+ fi
+ done
+
+ return 1
+}
+
# Test if the given file extension corresponds to a tar/gzip archive
-is_archive_tar_gzip()
+# Arg. #1: File extension
+is_extension_tar_gzip()
{
if [ ${#} -ne 1 ]; then
echo "${FUNCNAME}(), wrong number of arguments: ${*}"
return 1
}
+# Test if the given file is an archive file, judging by its contents
+# rather than its name: file(1) inspects the magic header and we grep its
+# output for the strings it prints for compressed or zip data.
+# Arg. #1: File name
+# Return: 0 if file(1) reports compressed/zip data, 1 otherwise
+# (also returns 1 when called with the wrong number of arguments).
+is_archive()
+{
+ if [ ${#} -ne 1 ]; then
+ echo "${FUNCNAME}(), wrong number of arguments: ${*}"
+ return 1
+ fi
+
+ local F=${1}
+
+ # NOTE(review): ${F} is unquoted, so a file name containing whitespace
+ # would word-split here — confirm callers never pass such names.
+ if file ${F} | grep -q --ignore-case -e "compressed data" -e "Zip archive data"; then
+ return 0;
+ else
+ return 1
+ fi
+}
+
+# Remove any partially downloaded file.
+# Arg. #1: Partial file name without partial extension (.part).
+# The ".part" suffix is appended here; 'rm -f' makes this a no-op when no
+# such partial file exists, so it is always safe to call.
+remove_partial_file()
+{
+ rm -f ${1}.part
+}
+
+# Download a file, and making sure it is valid (at least its header!)
+# The file is fetched to a temporary "<name>.part" and only renamed to its
+# final name once all validity checks pass, so an interrupted or bogus
+# transfer never leaves a truncated file under the final name.
+# Arg. #1: Source URL.
+# Arg. #2: Source filename (on server)
+#
+# Return: 0 : success
+# 1 : wget error
+# 2 : File not found or not an archive (if file extension was archive type)
+#
+# Globals read: LFS_PKG_DIR, WGETCMD, WGET_LOG_FILE, LFS_LOG_FILE,
+# FTYPE, FTYPE_PATCH — presumably set by the caller/environment; WGETCMD is
+# assumed to log into WGET_LOG_FILE, since that log is parsed below (verify).
+wget_wrapper()
+{
+ if [ ${#} -lt 2 ]; then
+ echo "${FUNCNAME}(), wrong number of arguments: ${*}"
+ return 1
+ fi
+
+ local URL=${1}
+ local SOURCE=${2}
+
+ # Start from a clean slate: drop any leftover partial download.
+ remove_partial_file ${LFS_PKG_DIR}/${SOURCE}
+
+ # Only mention the URL in the progress string when fetching a patch.
+ local STRING="Fetching ${SOURCE}"
+ if [ "x${FTYPE}" = "x${FTYPE_PATCH}" ]; then
+ STRING="${STRING} from ${URL}"
+ fi
+
+ # NOTE(review): wget_status is not declared 'local' and so is visible to
+ # (and clobbers any same-named variable of) the caller.
+ rcmd "${STRING}" ${WGETCMD} ${URL}/${SOURCE} \
+ --output-document=${LFS_PKG_DIR}/${SOURCE}.part
+ wget_status=$?
+
+ # Append log to global log file
+ cat ${WGET_LOG_FILE} >> ${LFS_LOG_FILE}
+
+ # Total failure if it was a connection timeout.
+ if grep -q "failed: Connection timed out" ${WGET_LOG_FILE}; then
+ echo "Error, wget reported: Connection timed out"
+ return 1
+ fi
+
+ # Partial failure if file was not found (caller may retry another
+ # extension or mirror on return code 2).
+ if detect_file_not_found; then
+ remove_partial_file ${LFS_PKG_DIR}/${SOURCE}
+ return 2
+ fi
+
+ # Any other non-zero wget status is a hard error.
+ if [ ${wget_status} -ne 0 ]; then
+ remove_partial_file ${LFS_PKG_DIR}/${SOURCE}
+ return 1
+ fi
+
+ # Extension = everything after the last dot of the source filename.
+ local FEXT=${SOURCE##*.}
+ if is_extension_archive "${FEXT}"; then
+ # Just to be sure, test if downloaded file is really an archive:
+ # (guards against servers returning an HTML error page with status 200)
+ if ! is_archive ${LFS_PKG_DIR}/${SOURCE}.part; then
+ # Partial failure if file is invalid.
+ remove_partial_file ${LFS_PKG_DIR}/${SOURCE}
+ return 2
+ fi
+ fi
+
+ # Rename temporary file to final name.
+ # NOTE: "{.part,}" is bash brace expansion (strips the .part suffix);
+ # this is a bashism and will not work under plain POSIX sh.
+ mv ${LFS_PKG_DIR}/${SOURCE}{.part,}
+
+ return 0
+}
+
# Arg. #1: URL for patches repository.
# Arg. #2: Destination filename.
static_fetch_patches_list()
PATCHES_URL=${1}
PATCHES_LIST_FILENAME=${2}
+ # Appending a slash (/) will download the directory content as a file named index.html
${WGETCMD} "${PATCHES_URL}/" &&
# Append log to global log file
# downloading other packages
export RCMD_NO_EXIT=1
+ test_presence_of_packages_directory
+
# First create log directory if it does not exists.
if [ ! -d ${LFS_LOG_DIR} ]; then
install -m755 -d ${LFS_LOG_DIR} || exit 1
PATCH_NAME=$(echo ${p} | sed s!%2B!\+!g)
if [ ! -f ${LFS_PKG_DIR}/${PATCH_NAME} ]; then
- rcmd "Fetching ${PATCH_NAME} from ${PATCHES_URL}" ${WGETCMD} ${PATCHES_URL}/${p}
+ # NOTE: the progress string shows ${PATCH_NAME} (with %2B decoded to '+')
+ # while the actual download uses ${p} (still URL-encoded) — verify this
+ # mismatch is intentional.
+ #####rcmd "Fetching ${PATCH_NAME} from ${PATCHES_URL}" wget_wrapper ${PATCHES_URL} ${p} ${FTYPE_OTHER}
+ wget_wrapper ${PATCHES_URL} ${p}
fi
done
fi
{
# HTTP: will return "ERROR 404: Not Found"
# FTP: will say "File not found" or "No such file"
- if grep --ignore-case -e "not found" -e "no such file" ${WGET_LOG_FILE} \
- 1> /dev/null 2>&1; then
+ if grep -q --ignore-case -e "not found" -e "no such file" ${WGET_LOG_FILE}; \
+ then
#echo "404 NOTFOUND"
return 0
fi
-
+
return 1
}
fi
for arch_ext in ${PREFERRED_EXT}; do
- # Don't take any chance: remove any partially downloaded file.
- # If we arrive here, it means the final destination file was not found
- # so we can safely remove any file prior to trying to download it.
- rm -f ${LFS_PKG_DIR}/${PACK}.${arch_ext}
-
- rcmd "Fetching ${PACK}.${arch_ext}" \
- ${WGETCMD} ${URL}/${PACK}.${arch_ext}
- wget_status=$?
-
- # Append log to global log file
- cat ${WGET_LOG_FILE} >> ${LFS_LOG_FILE}
-
- # Failure: if it was a connection timeout, don't try for other file
- # extensions.
- if grep "failed: Connection timed out" ${WGET_LOG_FILE} \
- 1> /dev/null 2>&1; then
- echo "Error, wget reported: Connection timed out"
- return 1
- fi
+ wget_wrapper ${URL} ${PACK}.${arch_ext}
+ wget_wrapper_status=$?
- if detect_file_not_found; then
- # Try next archive extension if file was not found.
- continue;
+ if [ ${wget_wrapper_status} -eq 1 ]; then
+ return 1;
fi
- if [ ${wget_status} -ne 0 ]; then
- return 1
+ if [ ${wget_wrapper_status} -eq 2 ]; then
+ continue;
fi
# If we are here, it means the file was successfully downloaded.
# Convert to bzip2 format if requested and if it is a tar/gz archive
if [ -n "${PREFER_BZIP2_ARCHIVE}" ]; then
- if is_archive_tar_gzip "${arch_ext}" ; then
+ if is_extension_tar_gzip "${arch_ext}" ; then
rcmd "Converting ${PACK}.${arch_ext} to bzip2 format" \
gztobz2 ${LFS_PKG_DIR}/${PACK}.${arch_ext}
FINAL_EXT="tar.bz2"
local FILE=${2}
local EXT=${3}
- if [ ! -f ${FILE} ]; then
- # The file may be absent, in this case simply abort without error
+ if [ ! -f ${FILE} -o -h ${FILE} ]; then
+ # The file may be absent, or a symbolic link. Abort without error
return 0
fi
if [ "x${EXT}" = "xtar.bz2" ]; then
TESTCMD="bzip2 -t"
- elif is_archive_tar_gzip "${EXT}"; then
+ elif is_extension_tar_gzip "${EXT}"; then
TESTCMD="gunzip -t"
+ elif [ "x${EXT}" = "xtar.xz" ]; then
+ TESTCMD="unxz -t"
+ elif [ "x${EXT}" = "xzip" ]; then
+ TESTCMD="unzip -t"
else
- # Can only test gzip and bzip2 archives
+ # Can only test gzip, bzip2, xz and zip archives.
return 0
fi
# pm (Perl module via CPAN)
# fd (freedesktop.org)
# -o Option specific to mode
-# -h Display this help and returns
# -s Subdirectory on server
+# -w First erase destination file if it exists (except in test mode)
fpkg()
{
local ORIG_ARGS=${*}
local MODE_OPT=""
local SRC_DIR=""
local FD_SUBDIR_FINAL=""
+ unset ERASE_FIRST
- while getopts "d:e:f:m:o:s:" flag ;do
+ while getopts "d:e:f:m:o:s:w" flag ;do
case ${flag} in
d)
# Fetch directory (where to put file)
s)
SRC_DIR=${OPTARG}
;;
+ w)
+ ERASE_FIRST="y"
+ ;;
?)
echo "${FUNCNAME}(): Invalid option: ${OPTARG}."
return 1
# Default subdirectory on server
SRC_DIR=$(get_pkg_name ${PACK})
fi
- SRC_DIR="${SRC_DIR}/$(get_pkg_ver_base ${PACK})"
+ SRC_DIR="${SRC_DIR}/$(get_pkg_ver2 ${PACK})"
;;
sf)
URL=${SOURCEFORGE_URL}
DEST_FILE=${DEST_DIR}/${PACK}.${FINAL_EXT}
- if [ ! -f ${DEST_FILE} ]; then
- if [ -z "${TEST_INTEGRITY}" ]; then
- # Fetch package, unless we are testing integrity
+ if [ -z "${TEST_INTEGRITY}" ]; then
+ if [ -f ${DEST_FILE} -a -n "${ERASE_FIRST}" ]; then
+ rm ${DEST_FILE}
+ fi
+
+ if [ ! -f ${DEST_FILE} ]; then
+ # Fetch package
set +e
static_getpkg ${SRC_FILENAME} ${URL} ${FILE_EXT}
set -e
mv ${DOWNLOADED_FILE} ${DEST_FILE}
fi
fi
- fi
-
- if [ -n "${TEST_INTEGRITY}" ]; then
+ else
set +e
test_archive_integrity "${PACK}" "${DEST_FILE}" "${FINAL_EXT}"
set -e
# Rename any patch fetched (in fpkg call) and replace SOURCE by TARGET
# in patch name.
- if ls ${LFS_PKG_DIR}/${SRC_FILENAME}-*.patch 1> /dev/null 2>&1; then
- echo "CMD=${SRC_FILENAME} ${PACK} ${LFS_PKG_DIR}/${SRC_FILENAME}-*.patch"
- rename ${SRC_FILENAME} ${PACK} ${LFS_PKG_DIR}/${SRC_FILENAME}-*.patch
+ local PATCHES_LIST="${LFS_PKG_DIR}/${SRC_FILENAME}-*.patch"
+ if ls ${PATCHES_LIST} 1> /dev/null 2>&1; then
+ echo "CMD=${SRC_FILENAME} ${PACK} ${PATCHES_LIST}"
+ rename ${SRC_FILENAME} ${PACK} ${PATCHES_LIST}
fi
fi
}