From 05a63ae322941e807a917d83bed8348202e22807 Mon Sep 17 00:00:00 2001 From: gobo72 Date: Sun, 28 Aug 2011 00:58:20 +0000 Subject: [PATCH] =?utf8?q?Am=C3=A9lioration=20de=20la=20d=C3=A9tection=20d?= =?utf8?q?es=20fichiers=20incomplets/corrompus=20lors=20du=20t=C3=A9l?= =?utf8?q?=C3=A9chargement=20avec=20wget?= MIME-Version: 1.0 Content-Type: text/plain; charset=utf8 Content-Transfer-Encoding: 8bit --- functions/fpkg | 139 +++++++++++++++++++++++++++++++++++++++---------- 1 file changed, 111 insertions(+), 28 deletions(-) diff --git a/functions/fpkg b/functions/fpkg index 7a17a83..fed9089 100644 --- a/functions/fpkg +++ b/functions/fpkg @@ -16,8 +16,13 @@ BLFS_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-blfs.html CLFS_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-clfs.html HV_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-hv.html +FTYPE_ARCHIVE="0" +FTYPE_PATCH="1" +FTYPE_OTHER="2" + # Test if the given file extension correspond to a tar/gzip archive -is_archive_tar_gzip() +# Arg. #1: File extension +is_extension_tar_gzip() { if [ ${#} -ne 1 ]; then echo "${FUNCNAME}(), wrong number of arguments: ${*}" @@ -35,6 +40,98 @@ is_archive_tar_gzip() return 1 } +# Test if the given file is an archive file +# Arg. #1: File name +is_archive() +{ + if [ ${#} -ne 1 ]; then + echo "${FUNCNAME}(), wrong number of arguments: ${*}" + return 1 + fi + + local F=${1} + + if file ${F} | grep -q --ignore-case -e "compressed data" -e "Zip archive data"; then + return 0; + else + return 1 + fi +} + +# Remove any partially downloaded file. +# Arg. #1: Partial file name without partial extension (.part). +remove_partial_file() +{ + rm -f ${1}.part +} + +# Download a file, and making sure it is valid (at least it's header!) +# Arg. #1: Source URL. +# Arg. #2: Source filename (on server) +# Arg. 
#3: File type: 0 : Archive
+#                     1 : Patch
+#                     2 : Other
+#
+# Return: 0 : success
+#         1 : wget error
+#         2 : File not found or not an archive
+wget_wrapper()
+{
+	if [ ${#} -lt 2 -a ${#} -gt 3 ]; then
+		echo "${FUNCNAME}(), wrong number of arguments: ${*}"
+		return 1
+	fi
+
+	local URL=${1}
+	local SOURCE=${2}
+	local FTYPE=${3}
+
+	remove_partial_file ${LFS_PKG_DIR}/${SOURCE}
+
+	local STRING="Fetching ${SOURCE}"
+	if [ "x${FTYPE}" = "x${FTYPE_PATCH}" ]; then
+		STRING="${STRING} from ${URL}"
+	fi
+
+	rcmd "${STRING}" ${WGETCMD} ${URL}/${SOURCE} \
+		--output-document=${LFS_PKG_DIR}/${SOURCE}.part
+	wget_status=$?
+
+	# Append log to global log file
+	cat ${WGET_LOG_FILE} >> ${LFS_LOG_FILE}
+
+	# Total failure if it was a connection timeout.
+	if grep -q "failed: Connection timed out" ${WGET_LOG_FILE}; then
+		echo "Error, wget reported: Connection timed out"
+		return 1
+	fi
+
+	# Partial failure if file was not found.
+	if detect_file_not_found; then
+		remove_partial_file ${LFS_PKG_DIR}/${SOURCE}
+		return 2
+	fi
+
+	if [ ${wget_status} -ne 0 ]; then
+		remove_partial_file ${LFS_PKG_DIR}/${SOURCE}
+		return 1
+	fi
+
+	if [ "x${FTYPE}" = "x${FTYPE_ARCHIVE}" ]; then
+		# Just to be sure, test if downloaded file is really an archive:
+		if ! is_archive ${LFS_PKG_DIR}/${SOURCE}.part; then
+			# Partial failure if file is invalid.
+			remove_partial_file ${LFS_PKG_DIR}/${SOURCE}
+			return 2
+		fi
+	fi
+
+	# Rename temporary file to final name
+	mv ${LFS_PKG_DIR}/${SOURCE}{.part,}
+
+	return 0
+}
+
 # Arg. #1: URL for patches repository.
 # Arg. #2: Destination filename.
 static_fetch_patches_list()
@@ -42,6 +139,7 @@ static_fetch_patches_list()
 	PATCHES_URL=${1}
 	PATCHES_LIST_FILENAME=${2}
 
+	# Appending a slash (/) will download the directory content as a file named index.html
 	${WGETCMD} "${PATCHES_URL}/" &&
 
 	# Append log to global log file
@@ -124,7 +222,9 @@ static_checkpatch()
 		PATCH_NAME=$(echo ${p} | sed s!%2B!\+!g)
 
 		if [ ! 
-f ${LFS_PKG_DIR}/${PATCH_NAME} ]; then - rcmd "Fetching ${PATCH_NAME} from ${PATCHES_URL}" ${WGETCMD} ${PATCHES_URL}/${p} + # String uses $PATCH_NAME and not $p ??? + #####rcmd "Fetching ${PATCH_NAME} from ${PATCHES_URL}" wget_wrapper ${PATCHES_URL} ${p} ${FTYPE_OTHER} + wget_wrapper ${PATCHES_URL} ${p} ${FTYPE_PATCH} fi done fi @@ -172,7 +272,7 @@ detect_file_not_found() #echo "404 NOTFOUND" return 0 fi - + return 1 } @@ -203,32 +303,15 @@ static_getpkg() fi for arch_ext in ${PREFERRED_EXT}; do - # Don't take any chance: remove any partially downloaded file. - # If we arrive here, it means the final destination file was not found - # so we can safely remove any file prior to trying to download it. - rm -f ${LFS_PKG_DIR}/${PACK}.${arch_ext} - - rcmd "Fetching ${PACK}.${arch_ext}" \ - ${WGETCMD} ${URL}/${PACK}.${arch_ext} - wget_status=$? - - # Append log to global log file - cat ${WGET_LOG_FILE} >> ${LFS_LOG_FILE} - - # Failure: if it was a connection timeout, don't try for other file - # extensions. - if grep -q "failed: Connection timed out" ${WGET_LOG_FILE}; then - echo "Error, wget reported: Connection timed out" - return 1 - fi + wget_wrapper ${URL} ${PACK}.${arch_ext} ${FTYPE_ARCHIVE} + wget_wrapper_status=$? - if detect_file_not_found; then - # Try next archive extension if file was not found. - continue; + if [ ${wget_wrapper_status} -eq 1 ]; then + return 1; fi - if [ ${wget_status} -ne 0 ]; then - return 1 + if [ ${wget_wrapper_status} -eq 2 ]; then + continue; fi # If we are here, it means the file was successfully downloaded. 
@@ -236,7 +319,7 @@ static_getpkg() # Convert to bzip2 format if requested and if it is a tar/gz archive if [ -n "${PREFER_BZIP2_ARCHIVE}" ]; then - if is_archive_tar_gzip "${arch_ext}" ; then + if is_extension_tar_gzip "${arch_ext}" ; then rcmd "Converting ${PACK}.${arch_ext} to bzip2 format" \ gztobz2 ${LFS_PKG_DIR}/${PACK}.${arch_ext} FINAL_EXT="tar.bz2" @@ -274,7 +357,7 @@ test_archive_integrity() if [ "x${EXT}" = "xtar.bz2" ]; then TESTCMD="bzip2 -t" - elif is_archive_tar_gzip "${EXT}"; then + elif is_extension_tar_gzip "${EXT}"; then TESTCMD="gunzip -t" elif [ "x${EXT}" = "xtar.xz" ]; then TESTCMD="unxz -t" -- 2.20.1