Add repository for older LFS patches on hugovil.com server
[hvlinux.git] / functions / fpkg
index dd855d9..b4ea48d 100644 (file)
@@ -15,9 +15,33 @@ LFS_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-lfs.html
 BLFS_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-blfs.html
 CLFS_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-clfs.html
 HV_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-hv.html
+LFS_OLD_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-lfs-old.html
+
+PATCHES_BLACKLIST=../config/patches.blacklist
+
+# Test if the given file extension corresponds to a compressed archive
+# Arg. #1: File extension
+is_extension_archive()
+{
+    if [ ${#} -ne 1 ]; then
+       echo "${FUNCNAME}(), wrong number of arguments: ${*}"
+       return 1
+    fi
+
+    local EXT=${1}
+
+    for k in "gz" "tgz" "Z" "zip" "xz" "bz2"; do
+        if [ "x${EXT}" = "x${k}" ]; then
+            return 0
+        fi
+    done
+
+    return 1
+}
 
 # Test if the given file extension correspond to a tar/gzip archive
-is_archive_tar_gzip()
+# Arg. #1: File extension
+is_extension_tar_gzip()
 {
     if [ ${#} -ne 1 ]; then
        echo "${FUNCNAME}(), wrong number of arguments: ${*}"
@@ -35,6 +59,98 @@ is_archive_tar_gzip()
     return 1
 }
 
+# Test if the given file is an archive file
+# Arg. #1: File name
+is_archive()
+{
+    if [ ${#} -ne 1 ]; then
+       echo "${FUNCNAME}(), wrong number of arguments: ${*}"
+       return 1
+    fi
+
+    local F=${1}
+
+    if file ${F} | grep -q --ignore-case -e "compressed data" -e "Zip archive data"; then
+        return 0;
+    else
+        return 1
+    fi
+}
+
+# Remove any partially downloaded file.
+# Arg. #1: Partial file name without partial extension (.part).
+remove_partial_file()
+{
+    rm -f ${1}.part
+}
+
+# Download a file, making sure it is valid (at least its header!)
+# Arg. #1: Source URL.
+# Arg. #2: Source filename (on server)
+#
+# Return: 0 : success
+#         1 : wget error
+#         2 : File not found or not an archive (if file extension was archive type)
+wget_wrapper()
+{
+    if [ ${#} -lt 2 ]; then
+       echo "${FUNCNAME}(), wrong number of arguments: ${*}"
+       return 1
+    fi
+
+    local URL=${1}
+    local SOURCE=${2}
+
+    remove_partial_file ${LFS_PKG_DIR}/${SOURCE}
+
+    local STRING="Fetching ${SOURCE}"
+    if [ "x${FTYPE}" = "x${FTYPE_PATCH}" ]; then
+        STRING="${STRING} from ${URL}"
+    fi
+
+    rcmd "${STRING}" ${WGETCMD} ${URL}/${SOURCE} \
+        --output-document=${LFS_PKG_DIR}/${SOURCE}.part
+    wget_status=$?
+
+    # Append log to global log file
+    cat ${WGET_LOG_FILE} >> ${LFS_LOG_FILE}
+
+    # Total failure if it was a connection timeout.
+    if grep -q "failed: Connection timed out" ${WGET_LOG_FILE}; then
+        echo "Error, wget reported: Connection timed out"
+        return 1
+    fi
+
+    # Partial failure if file was not found.
+    if detect_file_not_found; then
+        remove_partial_file ${LFS_PKG_DIR}/${SOURCE}
+        return 2
+    fi
+
+    if [ ${wget_status} -ne 0 ]; then
+        echo "Error: wget returned error status ${wget_status}" >> \
+            ${LFS_LOG_FILE}
+        remove_partial_file ${LFS_PKG_DIR}/${SOURCE}
+        return 1
+    fi
+
+    local FEXT=${SOURCE##*.}
+    if is_extension_archive "${FEXT}"; then
+        # Just to be sure, test if downloaded file is really an archive:
+        if ! is_archive ${LFS_PKG_DIR}/${SOURCE}.part; then
+            # Partial failure if file is invalid.
+            echo "Error: failed archive test" >> ${LFS_LOG_FILE}
+            remove_partial_file ${LFS_PKG_DIR}/${SOURCE}
+            return 2
+        fi
+    fi
+
+    # Rename temporary file to final name
+    mv ${LFS_PKG_DIR}/${SOURCE}{.part,}
+
+    return 0
+}
+
 # Arg. #1: URL for patches repository.
 # Arg. #2: Destination filename.
 static_fetch_patches_list()
@@ -42,6 +158,7 @@ static_fetch_patches_list()
     PATCHES_URL=${1}
     PATCHES_LIST_FILENAME=${2}
 
+    # Appending a slash (/) will download the directory content as a file named index.html
     ${WGETCMD} "${PATCHES_URL}/" &&
 
     # Append log to global log file
@@ -61,6 +178,8 @@ update_packages_init()
     # downloading other packages
     export RCMD_NO_EXIT=1
 
+    test_presence_of_packages_directory
+
     # First create log directory if it does not exists.
     if [ ! -d ${LFS_LOG_DIR} ]; then
        install -m755 -d ${LFS_LOG_DIR} || exit 1
@@ -86,6 +205,12 @@ update_packages_init()
         rcmd "Fetching LFS patches list" static_fetch_patches_list ${LFS_PATCHES_URL} ${LFS_PATCHES_LIST}
     fi
 
+    if [ -n "${USE_LFS_OLD_PATCHES}" ]; then
+        # Getting list of all LFS old patches from hugovil.com server.
+        rcmd "Fetching LFS old patches list" static_fetch_patches_list \
+            ${LFS_OLD_PATCHES_URL} ${LFS_OLD_PATCHES_LIST}
+    fi
+
     if [ -n "${USE_BLFS_PATCHES}" ]; then
         # Getting list of all patches from BLFS server.
         rcmd "Fetching BLFS patches list" static_fetch_patches_list ${BLFS_PATCHES_URL} ${BLFS_PATCHES_LIST}
@@ -115,14 +240,33 @@ static_checkpatch()
     # Remplace les "+" par "%2B"
     local PACK_URL=$(echo $PACK | sed s!\+!%2B!g)
 
-    local PATCHES_FOUND=$(cat ${PATCHES_LIST} | grep "${PACK_URL}-" | sed "s/.*\(${PACK_URL}-.*\.patch\).*/\1/")
+    # Patches list formats (patches-list-*.html):
+    #   LFS:     a href="name.patch"
+    #   hugovil: a href="dir/subdir/subdir/name.patch"
+    # We must search for a patch beginning with either a slash or a " to avoid
+    # the possibility of having another package name within a patch name:
+    #   if patch = Mesalib-8.0.4-llvm-3.1-fixes-1.patch
+    #   then we could erroneously try to download patch "llvm-3.1-fixes-1.patch"
+    local PATCHES_FOUND=$(cat ${PATCHES_LIST} | \
+        egrep "\"${PACK_URL}-|/${PACK_URL}-" | \
+        egrep ".patch\"" | \
+        sed "s/.*\(${PACK_URL}-.*\.patch\)\".*/\1/")
+
     if [ -n "${PATCHES_FOUND}" ]; then
        for p in ${PATCHES_FOUND}; do
             # Remplace les "%2B" par "+"
             PATCH_NAME=$(echo ${p} | sed s!%2B!\+!g)
 
            if [ ! -f ${LFS_PKG_DIR}/${PATCH_NAME} ]; then
-               rcmd "Fetching ${PATCH_NAME} from ${PATCHES_URL}" ${WGETCMD} ${PATCHES_URL}/${p}
+                # Fetch patch only if it is not blacklisted!
+                local BL=$(cat ${PATCHES_BLACKLIST} | \
+                    egrep "${PATCH_NAME}")
+
+                if [ "x${BL}" == "x" ]; then
+                    wget_wrapper ${PATCHES_URL} ${PATCH_NAME}
+                else
+                    MSGSTRING="Patch ${PATCH_NAME} blacklisted" print_status warning
+                fi
            fi
        done
     fi
@@ -145,6 +289,11 @@ static_getpatch()
         static_checkpatch ${PACK} ${LFS_PATCHES_LIST} ${LFS_PATCHES_URL}
     fi
 
+    if [ -n "${USE_LFS_OLD_PATCHES}" ]; then
+        # Checking if patch is available from LFS old.
+        static_checkpatch ${PACK} ${LFS_OLD_PATCHES_LIST} ${LFS_OLD_PATCHES_URL}
+    fi
+
     if [ -n "${USE_BLFS_PATCHES}" ]; then
         # Checking if patch is available from BLFS.
         static_checkpatch ${PACK} ${BLFS_PATCHES_LIST} ${BLFS_PATCHES_URL}
@@ -170,7 +319,7 @@ detect_file_not_found()
         #echo "404 NOTFOUND"
         return 0
     fi
-    
+
     return 1
 }
 
@@ -201,32 +350,15 @@ static_getpkg()
     fi
 
     for arch_ext in ${PREFERRED_EXT}; do
-        # Don't take any chance: remove any partially downloaded file.
-        # If we arrive here, it means the final destination file was not found
-        # so we can safely remove any file prior to trying to download it.
-        rm -f ${LFS_PKG_DIR}/${PACK}.${arch_ext}
-
-        rcmd "Fetching ${PACK}.${arch_ext}" \
-            ${WGETCMD} ${URL}/${PACK}.${arch_ext}
-        wget_status=$?
-
-        # Append log to global log file
-        cat ${WGET_LOG_FILE} >> ${LFS_LOG_FILE}
-
-        # Failure: if it was a connection timeout, don't try for other file
-        # extensions.
-        if grep -q "failed: Connection timed out" ${WGET_LOG_FILE}; then
-            echo "Error, wget reported: Connection timed out"
-            return 1
-        fi
+        wget_wrapper ${URL} ${PACK}.${arch_ext}
+        wget_wrapper_status=$?
 
-        if detect_file_not_found; then
-            # Try next archive extension if file was not found.
-            continue;
+        if [ ${wget_wrapper_status} -eq 1 ]; then
+            return 1;
         fi
 
-        if [ ${wget_status} -ne 0 ]; then
-            return 1
+        if [ ${wget_wrapper_status} -eq 2 ]; then
+            continue;
         fi
 
         # If we are here, it means the file was successfully downloaded.
@@ -234,7 +366,7 @@ static_getpkg()
 
         # Convert to bzip2 format if requested and if it is a tar/gz archive
         if [ -n "${PREFER_BZIP2_ARCHIVE}" ]; then
-            if is_archive_tar_gzip "${arch_ext}" ; then
+            if is_extension_tar_gzip "${arch_ext}" ; then
                 rcmd "Converting ${PACK}.${arch_ext} to bzip2 format" \
                     gztobz2 ${LFS_PKG_DIR}/${PACK}.${arch_ext}
                 FINAL_EXT="tar.bz2"
@@ -272,7 +404,7 @@ test_archive_integrity()
 
     if [ "x${EXT}" = "xtar.bz2" ]; then
         TESTCMD="bzip2 -t"
-    elif is_archive_tar_gzip "${EXT}"; then
+    elif is_extension_tar_gzip "${EXT}"; then
         TESTCMD="gunzip -t"
     elif [ "x${EXT}" = "xtar.xz" ]; then
         TESTCMD="unxz -t"
@@ -378,7 +510,7 @@ fpkg()
                 fi
                 if [ -z "${FILE_EXT}" ]; then
                     # Default file extension is tar.gz
-                    FILE_EXT="tar.gz"
+                    FILE_EXT="tar.xz"
                 fi
                 ;;
             gnome)
@@ -388,6 +520,10 @@ fpkg()
                    SRC_DIR=$(get_pkg_name ${PACK})
                 fi
                 SRC_DIR="${SRC_DIR}/$(get_pkg_ver2 ${PACK})"
+                                if [ -z "${FILE_EXT}" ]; then
+                    # Default file extension is xz
+                    FILE_EXT="tar.xz"
+                fi
                 ;;
             sf)
                 URL=${SOURCEFORGE_URL}