diff --git a/functions/fpkg b/functions/fpkg
index 2d3b474..1fc6ec4 100644
--- a/functions/fpkg
+++ b/functions/fpkg
@@ -16,8 +16,29 @@ BLFS_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-blfs.html
 CLFS_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-clfs.html
 HV_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-hv.html
 
+# Test if the given file extension corresponds to a compressed archive
+# Arg. #1: File extension
+is_extension_archive()
+{
+    if [ ${#} -ne 1 ]; then
+        echo "${FUNCNAME}(), wrong number of arguments: ${*}"
+        return 1
+    fi
+
+    local EXT=${1}
+
+    for k in "gz" "tgz" "Z" "zip" "xz" "bz2"; do
+        if [ "x${EXT}" = "x${k}" ]; then
+            return 0
+        fi
+    done
+
+    return 1
+}
+
 # Test if the given file extension corresponds to a tar/gzip archive
-is_archive_tar_gzip()
+# Arg. #1: File extension
+is_extension_tar_gzip()
 {
     if [ ${#} -ne 1 ]; then
         echo "${FUNCNAME}(), wrong number of arguments: ${*}"
@@ -35,6 +56,98 @@ is_archive_tar_gzip()
     return 1
 }
 
+# Test if the given file is an archive file
+# Arg. #1: File name
+is_archive()
+{
+    if [ ${#} -ne 1 ]; then
+        echo "${FUNCNAME}(), wrong number of arguments: ${*}"
+        return 1
+    fi
+
+    local F=${1}
+
+    if file ${F} | grep -q --ignore-case -e "compressed data" -e "Zip archive data"; then
+        return 0
+    else
+        return 1
+    fi
+}
+
+# Remove any partially downloaded file.
+# Arg. #1: Partial file name without partial extension (.part).
+remove_partial_file()
+{
+    rm -f ${1}.part
+}
+
+# Download a file, making sure it is valid (at least its header!)
+# Arg. #1: Source URL.
+# Arg. #2: Source filename (on server)
+#
+# Return: 0 : success
+#         1 : wget error
+#         2 : File not found or not an archive (if file extension was archive type)
+wget_wrapper()
+{
+    if [ ${#} -lt 2 ]; then
+        echo "${FUNCNAME}(), wrong number of arguments: ${*}"
+        return 1
+    fi
+
+    local URL=${1}
+    local SOURCE=${2}
+
+    remove_partial_file ${LFS_PKG_DIR}/${SOURCE}
+
+    local STRING="Fetching ${SOURCE}"
+    if [ "x${FTYPE}" = "x${FTYPE_PATCH}" ]; then
+        STRING="${STRING} from ${URL}"
+    fi
+
+    rcmd "${STRING}" ${WGETCMD} ${URL}/${SOURCE} \
+        --output-document=${LFS_PKG_DIR}/${SOURCE}.part
+    wget_status=$?
+
+    # Append log to global log file
+    cat ${WGET_LOG_FILE} >> ${LFS_LOG_FILE}
+
+    # Total failure if it was a connection timeout.
+    if grep -q "failed: Connection timed out" ${WGET_LOG_FILE}; then
+        echo "Error, wget reported: Connection timed out"
+        return 1
+    fi
+
+    # Partial failure if file was not found.
+    if detect_file_not_found; then
+        remove_partial_file ${LFS_PKG_DIR}/${SOURCE}
+        return 2
+    fi
+
+    if [ ${wget_status} -ne 0 ]; then
+        echo "Error: wget returned error status ${wget_status}" >> \
+            ${LFS_LOG_FILE}
+        remove_partial_file ${LFS_PKG_DIR}/${SOURCE}
+        return 1
+    fi
+
+    local FEXT=${SOURCE##*.}
+    if is_extension_archive "${FEXT}"; then
+        # Just to be sure, test if the downloaded file is really an archive:
+        if ! is_archive ${LFS_PKG_DIR}/${SOURCE}.part; then
+            # Partial failure if file is invalid.
+            echo "Error: failed archive test" >> ${LFS_LOG_FILE}
+            remove_partial_file ${LFS_PKG_DIR}/${SOURCE}
+            return 2
+        fi
+    fi
+
+    # Rename temporary file to final name
+    mv ${LFS_PKG_DIR}/${SOURCE}{.part,}
+
+    return 0
+}
+
 # Arg. #1: URL for patches repository.
 # Arg. #2: Destination filename.
 static_fetch_patches_list()
@@ -42,6 +155,7 @@ static_fetch_patches_list()
     PATCHES_URL=${1}
     PATCHES_LIST_FILENAME=${2}
 
+    # Appending a slash (/) will download the directory content as a file named index.html
     ${WGETCMD} "${PATCHES_URL}/" &&
 
     # Append log to global log file
@@ -61,6 +175,8 @@ update_packages_init()
     # downloading other packages
     export RCMD_NO_EXIT=1
 
+    test_presence_of_packages_directory
+
    # First create the log directory if it does not exist.
    if [ ! -d ${LFS_LOG_DIR} ]; then
        install -m755 -d ${LFS_LOG_DIR} || exit 1
@@ -115,14 +231,24 @@ static_checkpatch()
     # Replace "+" with "%2B"
     local PACK_URL=$(echo $PACK | sed s!\+!%2B!g)
 
-    local PATCHES_FOUND=$(cat ${PATCHES_LIST} | grep "${PACK_URL}-" | sed "s/.*\(${PACK_URL}-.*\.patch\).*/\1/")
+    # Patches list formats (patches-list-*.html):
+    #   LFS:     a href="name.patch"
+    #   hugovil: A HREF="dir/subdir/subdir/name.patch"
+    # We must search for a patch name preceded by either a slash or a double
+    # quote (") to avoid matching another package name within a patch name:
+    #   if patch = Mesalib-8.0.4-llvm-3.1-fixes-1.patch
+    #   then we could erroneously try to download patch "llvm-3.1-fixes-1.patch"
+    local PATCHES_FOUND=$(cat ${PATCHES_LIST} | \
+        egrep "\"${PACK_URL}-|/${PACK_URL}-" | \
+        sed "s/.*\(${PACK_URL}-.*\.patch\)\".*/\1/")
+
     if [ -n "${PATCHES_FOUND}" ]; then
         for p in ${PATCHES_FOUND}; do
             # Replace "%2B" with "+"
             PATCH_NAME=$(echo ${p} | sed s!%2B!\+!g)
             if [ ! -f ${LFS_PKG_DIR}/${PATCH_NAME} ]; then
-                rcmd "Fetching ${PATCH_NAME} from ${PATCHES_URL}" ${WGETCMD} ${PATCHES_URL}/${p}
+                wget_wrapper ${PATCHES_URL} ${PATCH_NAME}
             fi
         done
     fi
@@ -170,7 +296,7 @@ detect_file_not_found()
         #echo "404 NOTFOUND"
         return 0
     fi
-    
+
     return 1
 }
 
@@ -201,32 +327,15 @@ static_getpkg()
     fi
 
     for arch_ext in ${PREFERRED_EXT}; do
-        # Don't take any chance: remove any partially downloaded file.
-        # If we arrive here, it means the final destination file was not found
-        # so we can safely remove any file prior to trying to download it.
-        rm -f ${LFS_PKG_DIR}/${PACK}.${arch_ext}
-
-        rcmd "Fetching ${PACK}.${arch_ext}" \
-            ${WGETCMD} ${URL}/${PACK}.${arch_ext}
-        wget_status=$?
-
-        # Append log to global log file
-        cat ${WGET_LOG_FILE} >> ${LFS_LOG_FILE}
-
-        # Failure: if it was a connection timeout, don't try for other file
-        # extensions.
-        if grep -q "failed: Connection timed out" ${WGET_LOG_FILE}; then
-            echo "Error, wget reported: Connection timed out"
-            return 1
-        fi
+        wget_wrapper ${URL} ${PACK}.${arch_ext}
+        wget_wrapper_status=$?
 
-        if detect_file_not_found; then
-            # Try next archive extension if file was not found.
-            continue;
+        if [ ${wget_wrapper_status} -eq 1 ]; then
+            return 1
         fi
 
-        if [ ${wget_status} -ne 0 ]; then
-            return 1
+        if [ ${wget_wrapper_status} -eq 2 ]; then
+            continue
         fi
 
         # If we are here, it means the file was successfully downloaded.
@@ -234,7 +343,7 @@ static_getpkg()
 
     # Convert to bzip2 format if requested and if it is a tar/gz archive
     if [ -n "${PREFER_BZIP2_ARCHIVE}" ]; then
-        if is_archive_tar_gzip "${arch_ext}" ; then
+        if is_extension_tar_gzip "${arch_ext}" ; then
             rcmd "Converting ${PACK}.${arch_ext} to bzip2 format" \
                 gztobz2 ${LFS_PKG_DIR}/${PACK}.${arch_ext}
             FINAL_EXT="tar.bz2"
@@ -263,8 +372,8 @@ test_archive_integrity()
     local FILE=${2}
     local EXT=${3}
 
-    if [ ! -f ${FILE} ]; then
-        # The file may be absent, in this case simply abort without error
+    if [ ! -f ${FILE} -o -h ${FILE} ]; then
+        # The file may be absent, or a symbolic link. Abort without error.
         return 0
     fi
 
@@ -272,10 +381,14 @@ test_archive_integrity()
 
     if [ "x${EXT}" = "xtar.bz2" ]; then
         TESTCMD="bzip2 -t"
-    elif is_archive_tar_gzip "${EXT}"; then
+    elif is_extension_tar_gzip "${EXT}"; then
         TESTCMD="gunzip -t"
+    elif [ "x${EXT}" = "xtar.xz" ]; then
+        TESTCMD="unxz -t"
+    elif [ "x${EXT}" = "xzip" ]; then
+        TESTCMD="unzip -t"
     else
-        # Can only test gzip and bzip2 archives
+        # Can only test gzip, bzip2, xz and zip archives.
         return 0
     fi
@@ -374,7 +487,7 @@ fpkg()
             fi
             if [ -z "${FILE_EXT}" ]; then
-                # Default file extension is tar.gz
-                FILE_EXT="tar.gz"
+                # Default file extension is tar.xz
+                FILE_EXT="tar.xz"
             fi
             ;;
         gnome)
@@ -383,7 +496,11 @@ fpkg()
                 # Default subdirectory on server
                 SRC_DIR=$(get_pkg_name ${PACK})
             fi
-            SRC_DIR="${SRC_DIR}/$(get_pkg_ver_base ${PACK})"
+            SRC_DIR="${SRC_DIR}/$(get_pkg_ver2 ${PACK})"
+            if [ -z "${FILE_EXT}" ]; then
+                # Default file extension is tar.xz
+                FILE_EXT="tar.xz"
+            fi
             ;;
         sf)
            URL=${SOURCEFORGE_URL}
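
Note: a minimal sketch of how a caller is expected to consume the three-way
return status of the new wget_wrapper() (0 = success, 1 = hard failure such as
a wget error or timeout, 2 = file not found or failed archive test). It mirrors
the loop in static_getpkg() above; try_mirrors() and MIRROR_LIST are
hypothetical names used only for illustration, not part of fpkg:

# Hypothetical helper: try each mirror in turn for a given file.
try_mirrors()
{
    local FILE=${1}

    for mirror in ${MIRROR_LIST}; do
        wget_wrapper ${mirror} ${FILE}
        case ${?} in
            0) return 0 ;;  # Downloaded and validated.
            2) continue ;;  # Not found on this mirror: try the next one.
            *) return 1 ;;  # Hard failure (e.g. timeout): abort immediately.
        esac
    done

    return 1
}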
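
Note: a small demonstration of why the new egrep pattern in static_checkpatch()
anchors the package name on a double quote or a slash. Both list entries below
are hypothetical; with the old unanchored grep "${PACK_URL}-", searching for
package llvm would also match the Mesalib patch:

printf '%s\n' \
    'a href="Mesalib-8.0.4-llvm-3.1-fixes-1.patch"' \
    'a href="llvm-3.1-fixes-1.patch"' | \
    egrep '"llvm-|/llvm-'
# Prints only: a href="llvm-3.1-fixes-1.patch"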
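
Note: the TESTCMD dispatch added to test_archive_integrity() is roughly
equivalent to the standalone sketch below. check_archive() is a hypothetical
rewrite keyed on the file name instead of a separate extension argument, shown
only to summarize which formats can now be verified:

check_archive()
{
    case ${1} in
        *.tar.bz2)      bzip2 -t ${1} ;;
        *.tar.gz|*.tgz) gunzip -t ${1} ;;
        *.tar.xz)       unxz -t ${1} ;;
        *.zip)          unzip -t ${1} > /dev/null ;;
        *)              return 0 ;;  # Other formats are not verified.
    esac
}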