diff --git a/functions/fpkg b/functions/fpkg
index 3330eed..1fc6ec4 100644
--- a/functions/fpkg
+++ b/functions/fpkg
@@ -16,8 +16,29 @@
 BLFS_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-blfs.html
 CLFS_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-clfs.html
 HV_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-hv.html
 
+# Test if the given file extension corresponds to a compressed archive
+# Arg. #1: File extension
+is_extension_archive()
+{
+    if [ ${#} -ne 1 ]; then
+        echo "${FUNCNAME}(), wrong number of arguments: ${*}"
+        return 1
+    fi
+
+    local EXT=${1}
+
+    for k in "gz" "tgz" "Z" "zip" "xz" "bz2"; do
+        if [ "x${EXT}" = "x${k}" ]; then
+            return 0
+        fi
+    done
+
+    return 1
+}
+
 # Test if the given file extension corresponds to a tar/gzip archive
-is_archive_tar_gzip()
+# Arg. #1: File extension
+is_extension_tar_gzip()
 {
     if [ ${#} -ne 1 ]; then
         echo "${FUNCNAME}(), wrong number of arguments: ${*}"
@@ -35,6 +56,98 @@ is_archive_tar_gzip()
     return 1
 }
 
+# Test if the given file is an archive file
+# Arg. #1: File name
+is_archive()
+{
+    if [ ${#} -ne 1 ]; then
+        echo "${FUNCNAME}(), wrong number of arguments: ${*}"
+        return 1
+    fi
+
+    local F=${1}
+
+    if file ${F} | grep -q --ignore-case -e "compressed data" -e "Zip archive data"; then
+        return 0;
+    else
+        return 1
+    fi
+}
+
+# Remove any partially downloaded file.
+# Arg. #1: Partial file name, without the partial extension (.part).
+remove_partial_file()
+{
+    rm -f ${1}.part
+}
+
+# Download a file, making sure it is valid (at least its header!)
+# Arg. #1: Source URL.
+# Arg. #2: Source filename (on server)
+#
+# Return: 0: success
+#         1: wget error
+#         2: file not found, or not an archive (if the file extension was an archive type)
+wget_wrapper()
+{
+    if [ ${#} -lt 2 ]; then
+        echo "${FUNCNAME}(), wrong number of arguments: ${*}"
+        return 1
+    fi
+
+    local URL=${1}
+    local SOURCE=${2}
+
+    remove_partial_file ${LFS_PKG_DIR}/${SOURCE}
+
+    local STRING="Fetching ${SOURCE}"
+    if [ "x${FTYPE}" = "x${FTYPE_PATCH}" ]; then
+        STRING="${STRING} from ${URL}"
+    fi
+
+    rcmd "${STRING}" ${WGETCMD} ${URL}/${SOURCE} \
+        --output-document=${LFS_PKG_DIR}/${SOURCE}.part
+    wget_status=$?
+
+    # Append log to global log file
+    cat ${WGET_LOG_FILE} >> ${LFS_LOG_FILE}
+
+    # Total failure if it was a connection timeout.
+    if grep -q "failed: Connection timed out" ${WGET_LOG_FILE}; then
+        echo "Error, wget reported: Connection timed out"
+        return 1
+    fi
+
+    # Partial failure if file was not found.
+    if detect_file_not_found; then
+        remove_partial_file ${LFS_PKG_DIR}/${SOURCE}
+        return 2
+    fi
+
+    if [ ${wget_status} -ne 0 ]; then
+        echo "Error: wget returned error status ${wget_status}" >> \
+            ${LFS_LOG_FILE}
+        remove_partial_file ${LFS_PKG_DIR}/${SOURCE}
+        return 1
+    fi
+
+    local FEXT=${SOURCE##*.}
+    if is_extension_archive "${FEXT}"; then
+        # Just to be sure, test if the downloaded file is really an archive:
+        if ! is_archive ${LFS_PKG_DIR}/${SOURCE}.part; then
+            # Partial failure if file is invalid.
+            echo "Error: failed archive test" >> ${LFS_LOG_FILE}
+            remove_partial_file ${LFS_PKG_DIR}/${SOURCE}
+            return 2
+        fi
+    fi
+
+    # Rename temporary file to final name
+    mv ${LFS_PKG_DIR}/${SOURCE}{.part,}
+
+    return 0
+}
+
 # Arg. #1: URL for patches repository.
 # Arg. #2: Destination filename.
 static_fetch_patches_list()
@@ -42,6 +155,7 @@ static_fetch_patches_list()
     PATCHES_URL=${1}
     PATCHES_LIST_FILENAME=${2}
 
+    # Appending a slash (/) downloads the directory contents as a file named index.html
     ${WGETCMD} "${PATCHES_URL}/" &&
 
     # Append log to global log file
@@ -61,6 +175,8 @@ update_packages_init()
     # downloading other packages
     export RCMD_NO_EXIT=1
 
+    test_presence_of_packages_directory
+
     # First create log directory if it does not exist.
     if [ ! -d ${LFS_LOG_DIR} ]; then
         install -m755 -d ${LFS_LOG_DIR} || exit 1
@@ -115,14 +231,24 @@ static_checkpatch()
     # Replace "+" with "%2B"
     local PACK_URL=$(echo $PACK | sed s!\+!%2B!g)
 
-    local PATCHES_FOUND=$(cat ${PATCHES_LIST} | grep "${PACK_URL}-" | sed "s/.*\(${PACK_URL}-.*\.patch\).*/\1/")
+    # Patches list formats (patches-list-*.html):
+    #   LFS:     a href="name.patch"
+    #   hugovil: A HREF="dir/subdir/subdir/name.patch"
+    # We must search for a patch beginning with either a slash or a " to avoid
+    # the possibility of having another package name within a patch name:
+    #   if patch = Mesalib-8.0.4-llvm-3.1-fixes-1.patch
+    #   then we could erroneously try to download patch "llvm-3.1-fixes-1.patch"
+    local PATCHES_FOUND=$(cat ${PATCHES_LIST} | \
+        egrep "\"${PACK_URL}-|/${PACK_URL}-" | \
+        sed "s/.*\(${PACK_URL}-.*\.patch\)\".*/\1/")
+
     if [ -n "${PATCHES_FOUND}" ]; then
         for p in ${PATCHES_FOUND}; do
             # Replace "%2B" with "+"
             PATCH_NAME=$(echo ${p} | sed s!%2B!\+!g)
             if [ ! -f ${LFS_PKG_DIR}/${PATCH_NAME} ]; then
-                rcmd "Fetching ${PATCH_NAME} from ${PATCHES_URL}" ${WGETCMD} ${PATCHES_URL}/${p}
+                wget_wrapper ${PATCHES_URL} ${PATCH_NAME}
             fi
         done
     fi
@@ -165,12 +291,12 @@ detect_file_not_found()
 {
     # HTTP: will return "ERROR 404: Not Found"
     # FTP:  will say "File not found" or "No such file"
-    if grep --ignore-case -e "not found" -e "no such file" ${WGET_LOG_FILE} \
-        1> /dev/null 2>&1; then
+    if grep -q --ignore-case -e "not found" -e "no such file" ${WGET_LOG_FILE}; \
+        then
         #echo "404 NOTFOUND"
         return 0
     fi
-    
+
     return 1
 }
@@ -201,33 +327,15 @@ static_getpkg()
     fi
 
     for arch_ext in ${PREFERRED_EXT}; do
-        # Don't take any chance: remove any partially downloaded file.
-        # If we arrive here, it means the final destination file was not found
-        # so we can safely remove any file prior to trying to download it.
-        rm -f ${LFS_PKG_DIR}/${PACK}.${arch_ext}
-
-        rcmd "Fetching ${PACK}.${arch_ext}" \
-            ${WGETCMD} ${URL}/${PACK}.${arch_ext}
-        wget_status=$?
-
-        # Append log to global log file
-        cat ${WGET_LOG_FILE} >> ${LFS_LOG_FILE}
-
-        # Failure: if it was a connection timeout, don't try for other file
-        # extensions.
-        if grep "failed: Connection timed out" ${WGET_LOG_FILE} \
-            1> /dev/null 2>&1; then
-            echo "Error, wget reported: Connection timed out"
-            return 1
-        fi
+        wget_wrapper ${URL} ${PACK}.${arch_ext}
+        wget_wrapper_status=$?
 
-        if detect_file_not_found; then
-            # Try next archive extension if file was not found.
-            continue;
+        if [ ${wget_wrapper_status} -eq 1 ]; then
+            return 1;
         fi
 
-        if [ ${wget_status} -ne 0 ]; then
-            return 1
+        if [ ${wget_wrapper_status} -eq 2 ]; then
+            continue;
         fi
 
         # If we are here, it means the file was successfully downloaded.
@@ -235,7 +343,7 @@ static_getpkg()
 
     # Convert to bzip2 format if requested and if it is a tar/gz archive
     if [ -n "${PREFER_BZIP2_ARCHIVE}" ]; then
-        if is_archive_tar_gzip "${arch_ext}" ; then
+        if is_extension_tar_gzip "${arch_ext}" ; then
             rcmd "Converting ${PACK}.${arch_ext} to bzip2 format" \
                 gztobz2 ${LFS_PKG_DIR}/${PACK}.${arch_ext}
             FINAL_EXT="tar.bz2"
@@ -264,8 +372,8 @@ test_archive_integrity()
     local FILE=${2}
     local EXT=${3}
 
-    if [ ! -f ${FILE} ]; then
-        # The file may be absent, in this case simply abort without error
+    if [ ! -f ${FILE} -o -h ${FILE} ]; then
+        # The file may be absent, or a symbolic link. Abort without error.
         return 0
     fi
 
@@ -273,10 +381,14 @@ test_archive_integrity()
 
     if [ "x${EXT}" = "xtar.bz2" ]; then
         TESTCMD="bzip2 -t"
-    elif is_archive_tar_gzip "${EXT}"; then
+    elif is_extension_tar_gzip "${EXT}"; then
         TESTCMD="gunzip -t"
+    elif [ "x${EXT}" = "xtar.xz" ]; then
+        TESTCMD="unxz -t"
+    elif [ "x${EXT}" = "xzip" ]; then
+        TESTCMD="unzip -t"
     else
-        # Can only test gzip and bzip2 archives
+        # Can only test gzip, bzip2, xz and zip archives.
         return 0
     fi
 
@@ -301,8 +413,8 @@ test_archive_integrity()
 #    pm (Perl module via CPAN)
 #    fd (freedesktop.org)
 # -o Option specific to mode
-# -h Display this help and returns
 # -s Subdirectory on server
+# -w First erase destination file if it exists (except in test mode)
 fpkg()
 {
     local ORIG_ARGS=${*}
@@ -313,8 +425,9 @@ fpkg()
     local MODE_OPT=""
     local SRC_DIR=""
     local FD_SUBDIR_FINAL=""
+    unset ERASE_FIRST
 
-    while getopts "d:e:f:m:o:s:" flag ;do
+    while getopts "d:e:f:m:o:s:w" flag ;do
         case ${flag} in
             d)
                 # Fetch directory (where to put file)
@@ -337,6 +450,9 @@ fpkg()
             s)
                 SRC_DIR=${OPTARG}
                 ;;
+            w)
+                ERASE_FIRST="y"
+                ;;
             ?)
                 echo "${FUNCNAME}(): Invalid option: ${OPTARG}."
                 return 1
@@ -371,7 +487,7 @@ fpkg()
             fi
             if [ -z "${FILE_EXT}" ]; then
-                # Default file extension is tar.gz
-                FILE_EXT="tar.gz"
+                # Default file extension is tar.xz
+                FILE_EXT="tar.xz"
             fi
             ;;
         gnome)
@@ -380,7 +496,11 @@ fpkg()
                 # Default subdirectory on server
                 SRC_DIR=$(get_pkg_name ${PACK})
             fi
-            SRC_DIR="${SRC_DIR}/$(get_pkg_ver_base ${PACK})"
+            SRC_DIR="${SRC_DIR}/$(get_pkg_ver2 ${PACK})"
+            if [ -z "${FILE_EXT}" ]; then
+                # Default file extension is tar.xz
+                FILE_EXT="tar.xz"
+            fi
             ;;
         sf)
             URL=${SOURCEFORGE_URL}
@@ -507,9 +627,13 @@ fpkg()
 
     DEST_FILE=${DEST_DIR}/${PACK}.${FINAL_EXT}
 
-    if [ ! -f ${DEST_FILE} ]; then
-        if [ -z "${TEST_INTEGRITY}" ]; then
-            # Fetch package, unless we are testing integrity
+    if [ -z "${TEST_INTEGRITY}" ]; then
+        if [ -f ${DEST_FILE} -a -n "${ERASE_FIRST}" ]; then
+            rm ${DEST_FILE}
+        fi
+
+        if [ ! -f ${DEST_FILE} ]; then
+            # Fetch package
             set +e
             static_getpkg ${SRC_FILENAME} ${URL} ${FILE_EXT}
             set -e
@@ -523,9 +647,7 @@ fpkg()
                 mv ${DOWNLOADED_FILE} ${DEST_FILE}
             fi
         fi
-    fi
-
-    if [ -n "${TEST_INTEGRITY}" ]; then
+    else
         set +e
         test_archive_integrity "${PACK}" "${DEST_FILE}" "${FINAL_EXT}"
         set -e
@@ -544,9 +666,10 @@ fpkg()
 
         # Rename any patch fetched (in fpkg call) and replace SOURCE by TARGET
         # in patch name.
-        if ls ${LFS_PKG_DIR}/${SRC_FILENAME}-*.patch 1> /dev/null 2>&1; then
-            echo "CMD=${SRC_FILENAME} ${PACK} ${LFS_PKG_DIR}/${SRC_FILENAME}-*.patch"
-            rename ${SRC_FILENAME} ${PACK} ${LFS_PKG_DIR}/${SRC_FILENAME}-*.patch
+        local PATCHES_LIST="${LFS_PKG_DIR}/${SRC_FILENAME}-*.patch"
+        if ls ${PATCHES_LIST} 1> /dev/null 2>&1; then
+            echo "CMD=${SRC_FILENAME} ${PACK} ${PATCHES_LIST}"
+            rename ${SRC_FILENAME} ${PACK} ${PATCHES_LIST}
        fi
    fi
 }
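
The central pattern this patch introduces in wget_wrapper() is: download to a
temporary .part name, validate the payload, and only then rename to the final
name. Below is a minimal standalone sketch of that pattern, kept outside the
fpkg framework for clarity. It assumes only wget(1) and file(1); the names
fetch_archive and DEST_DIR are illustrative, not part of fpkg.

#!/bin/bash
# Sketch of the download-validate-rename pattern used by wget_wrapper().
# fetch_archive and DEST_DIR are hypothetical names for illustration only.

DEST_DIR=${DEST_DIR:-/tmp/pkg}

fetch_archive()
{
    local url=${1}
    local name=${2}
    local dest=${DEST_DIR}/${name}

    mkdir -p ${DEST_DIR}

    # Never reuse a stale partial file from a previous run.
    rm -f ${dest}.part

    # Download under a temporary name, so an interrupted transfer can
    # never be mistaken for a complete archive.
    if ! wget -q ${url}/${name} --output-document=${dest}.part; then
        rm -f ${dest}.part
        return 1
    fi

    # Sanity-check the payload: an HTML error page is not "compressed data".
    if ! file ${dest}.part | grep -q -i -e "compressed data" -e "Zip archive data"; then
        rm -f ${dest}.part
        return 2
    fi

    # Only a fully downloaded, validated file receives its final name.
    mv ${dest}.part ${dest}
}

An invocation would look like "fetch_archive http://example.com/sources
foo-1.0.tar.xz"; the return codes mirror wget_wrapper(): 1 for a hard download
error, 2 for a missing or invalid payload (the caller may then try another
archive extension, as static_getpkg() does above).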