Stage separation (continued)
diff --git a/functions-update b/functions-update
index 1fe40ed..916f239 100644
--- a/functions-update
+++ b/functions-update
@@ -1,6 +1,6 @@
 #!/bin/sh
 
-LFS_PKG_BASE="$(dirname $(dirname $(pwd)))/packages"
+LFS_PKG_BASE="$(dirname $(pwd))/packages"
 LFS_PKG_DIR="${LFS_PKG_BASE}/${LFS_STAGE}"
 LFS_LOG_DIR="${LFS}/var/log/hvlinux-install/${LFS_STAGE}"
 LFS_LOG_FILE="${LFS_LOG_DIR}/pkg-update.log"
@@ -8,27 +8,32 @@ WGET_LOG_FILE="${LFS_LOG_DIR}/pkg-wget.log"
 
 # URL
 LFS_BASE_URL="http://www.linuxfromscratch.org"
+CLFS_BASE_URL="http://svn.cross-lfs.org/svn/repos/cross-lfs/trunk"
+HV_BASE_URL="http://www.hugovil.com/hvlinux-repository"
+
 LFS_PATCHES_URL="${LFS_BASE_URL}/patches/lfs/development"
-LFS_PACKAGES_URL="ftp://ftp.osuosl.org/pub/lfs/lfs-packages/conglomeration"
 BLFS_PATCHES_URL="${LFS_BASE_URL}/patches/blfs/svn"
-HV_BASE_URL="http://www.hugovil.com/hvlinux-repository"
-HV_PACKAGES_URL="${HV_BASE_URL}/packages"
+CLFS_PATCHES_URL="http://patches.cross-lfs.org/dev"
 HV_PATCHES_URL="${HV_BASE_URL}/patches"
+
+LFS_PACKAGES_URL="ftp://ftp.osuosl.org/pub/lfs/lfs-packages/conglomeration"
+HV_PACKAGES_URL="${HV_BASE_URL}/packages"
 SOURCEFORGE_URL="http://internap.dl.sourceforge.net/sourceforge"
 GNOME_URL="http://ftp.gnome.org/pub/gnome/sources"
 GNU_URL="http://ftp.gnu.org/pub/gnu"
 TETEX_URL="http://www.tug.org/ftp/tex-archive/systems/unix/teTeX"
 
-WGETCMD="wget --directory-prefix=${LFS_PKG_DIR} --timeout=5 --tries=3 -o ${WGET_LOG_FILE}"
+WGETCMD="wget --directory-prefix=${LFS_PKG_DIR} --timeout=5 --tries=3 -nc -c -o ${WGET_LOG_FILE}"
 
 LFS_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-lfs.html
 BLFS_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-blfs.html
+CLFS_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-clfs.html
 HV_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-hv.html
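
For reference, the added -nc (skip files already present) and -c (resume partial downloads) options make re-running an update safe. Roughly, each patch fetch expands to the command below; this is only an illustration, and the patch file name is made up rather than taken from this change.

    # Approximate expansion of ${WGETCMD} ${LFS_PATCHES_URL}/<patch-file>
    wget --directory-prefix=${LFS_PKG_DIR} --timeout=5 --tries=3 -nc -c \
         -o ${WGET_LOG_FILE} \
         http://www.linuxfromscratch.org/patches/lfs/development/bash-3.2-fixes-1.patch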
 
-
 # Arg. #1: URL for patches repository.
 # Arg. #2: Destination filename.
-static_fetch_patches_list() {
+static_fetch_patches_list()
+{
     PATCHES_URL=${1}
     PATCHES_LIST_FILENAME=${2}
 
@@ -36,10 +41,10 @@ static_fetch_patches_list() {
     mv ${LFS_PKG_DIR}/index.html ${PATCHES_LIST_FILENAME}
 }
 
-
 # Arg. #1: URL for patches repository. The trailing
 #          slash is absolutely necessary for this to work.
-update_packages_init() {
+update_packages_init()
+{
     # First create log directory if it does not exist.
     if [ ! -d ${LFS_LOG_DIR} ]; then
        install -m755 -d ${LFS_LOG_DIR} || exit 1
@@ -50,23 +55,33 @@ update_packages_init() {
        install -v -m755 -d ${LFS_PKG_DIR} 1> ${LFS_LOG_FILE} 2>&1 || exit 1
     fi
 
-    # Getting list of all patches from LFS server.
-    ##action_checkbox "Fetching LFS patches list"
-    static_fetch_patches_list ${LFS_PATCHES_URL} ${LFS_PATCHES_LIST}
+    if [ -n "${USE_LFS_PATCHES}" ]; then
+        # Getting list of all patches from LFS server.
+        rcmd "Fetching LFS patches list" static_fetch_patches_list ${LFS_PATCHES_URL} ${LFS_PATCHES_LIST}
+    fi
+
+    if [ -n "${USE_BLFS_PATCHES}" ]; then
+        # Getting list of all patches from BLFS server.
+        rcmd "Fetching BLFS patches list" static_fetch_patches_list ${BLFS_PATCHES_URL} ${BLFS_PATCHES_LIST}
+    fi
 
-    # Getting list of all patches from BLFS server.
-    action_checkbox "Fetching BLFS patches list" static_fetch_patches_list ${BLFS_PATCHES_URL} ${BLFS_PATCHES_LIST}
+    if [ -n "${USE_CLFS_PATCHES}" ]; then
+        # Getting list of all patches from CLFS server.
+        rcmd "Fetching CLFS patches list" static_fetch_patches_list ${CLFS_PATCHES_URL} ${CLFS_PATCHES_LIST}
+    fi
 
-    # Getting list of all patches from hugovil.com server.
-    action_checkbox "Fetching hugovil.com patches list" static_fetch_patches_list ${HV_PATCHES_URL} ${HV_PATCHES_LIST}
+    if [ -n "${USE_HV_PATCHES}" ]; then
+        # Getting list of all patches from hugovil.com server.
+        rcmd "Fetching hugovil.com patches list" static_fetch_patches_list ${HV_PATCHES_URL} ${HV_PATCHES_LIST}
+    fi
 }
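
The list fetches are now gated by the USE_*_PATCHES switches, so a stage only downloads the indexes it actually needs. A minimal sketch of how a caller might enable them; the values (and the idea of a stage configuration file) are assumptions, not part of this change:

    # Hypothetical stage configuration: any non-empty value enables a fetch.
    USE_LFS_PATCHES=1
    USE_BLFS_PATCHES=1
    USE_CLFS_PATCHES=""   # empty or unset: the CLFS patches list is skipped
    USE_HV_PATCHES=1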
 
-
 # Get patch package if it is not in the repository
 # Arg. #1: Package name and version
 # Arg. #2: Patches list file (HTML)
 # Arg. #3: Patches URL
-static_checkpatch() {
+static_checkpatch()
+{
     local PACK=${1}
     local PATCHES_LIST=${2}
     local PATCHES_URL=${3}
@@ -75,16 +90,16 @@ static_checkpatch() {
     if [ -n "${PATCHES_FOUND}" ]; then
        for p in ${PATCHES_FOUND}; do
            if [ ! -f ${LFS_PKG_DIR}/${p} ]; then
-               action_checkbox_time "Fetching ${p}" ${WGETCMD} ${PATCHES_URL}/${p}
+               rcmd "Fetching ${p} from ${PATCHES_URL}" ${WGETCMD} ${PATCHES_URL}/${p}
            fi
        done
     fi
 }
 
-
 # Get patch package if it is not in the repository
 # Arg. #1: Package name and version
-static_getpatch() {
+static_getpatch()
+{
     PACK=${1}
 
     # Checking for correct number of arguments
@@ -93,16 +108,26 @@ static_getpatch() {
        return 1
     fi
 
-    # Checking if patch is available from LFS.
-    static_checkpatch ${PACK} ${LFS_PATCHES_LIST} ${LFS_PATCHES_URL}
+    if [ -n "${USE_LFS_PATCHES}" ]; then
+        # Checking if patch is available from LFS.
+        static_checkpatch ${PACK} ${LFS_PATCHES_LIST} ${LFS_PATCHES_URL}
+    fi
 
-    # Checking if patch is available from BLFS.
-    static_checkpatch ${PACK} ${BLFS_PATCHES_LIST} ${BLFS_PATCHES_URL}
+    if [ -n "${USE_BLFS_PATCHES}" ]; then
+        # Checking if patch is available from BLFS.
+        static_checkpatch ${PACK} ${BLFS_PATCHES_LIST} ${BLFS_PATCHES_URL}
+    fi
 
-    # Checking if patch is available from hugovil.com.
-    static_checkpatch ${PACK} ${HV_PATCHES_LIST} ${HV_PATCHES_URL}
-}
+    if [ -n "${USE_CLFS_PATCHES}" ]; then
+        # Checking if patch is available from CLFS.
+        static_checkpatch ${PACK} ${CLFS_PATCHES_LIST} ${CLFS_PATCHES_URL}
+    fi
 
+    if [ -n "${USE_HV_PATCHES}" ]; then
+        # Checking if patch is available from hugovil.com.
+        static_checkpatch ${PACK} ${HV_PATCHES_LIST} ${HV_PATCHES_URL}
+    fi
+}
 
 # Convert multiple gzip-compressed files to bzip2.
 # Usage: gztobz2 [FILES]
@@ -152,12 +177,12 @@ gztobz2()
     done
 }
 
-
 detect_file_not_found()
 {
     # HTTP: will return error code 404.
     # FTP: will say "File not found"
-    if grep "404 Not Found" ${WGET_LOG_FILE} 1> /dev/null 2>&1; then
+    if grep "404" ${WGET_LOG_FILE} 1> /dev/null 2>&1; then
         return 0
     elif grep "No such file" ${WGET_LOG_FILE} 1> /dev/null 2>&1; then
         return 0
@@ -166,11 +191,11 @@ detect_file_not_found()
     fi
 }
 
-
 # Get package if it is not in the repository
 # Arg. #1: Package name and version
 # Arg. #2: URL
-static_getpkg() {
+static_getpkg()
+{
     PACK=${1}
     URL=${2}
 
@@ -180,50 +205,41 @@ static_getpkg() {
        return 1
     fi
 
-    ${WGETCMD} ${URL}/${PACK}.tar.bz2 && return 0
-
-    # Failure: if it was a connection timeout, don't try for other file extensions.
-    if grep "failed: Connection timed out" ${WGET_LOG_FILE} 1> /dev/null 2>&1; then
-        echo "Error, wget reported: Connection timed out"
-        return 1
+    if [ -z "${ARCH_EXT}" ]; then
+        # List of default archive extensions to try
+        ARCH_EXT="tar.bz2 tar.gz tgz tar.Z"
     fi
 
-    if detect_file_not_found; then
-        # If file was not found, maybe a .tar.gz file exist...
-       ${WGETCMD} ${URL}/${PACK}.tar.gz
-       if [ ${?} -eq 0 ]; then
-           gztobz2 ${LFS_PKG_DIR}/${PACK}.tar.gz || return 1
-            return 0
-       else
-            if detect_file_not_found; then
-                # If file was not found, maybe a .tgz file exist...
-               ${WGETCMD} ${URL}/${PACK}.tgz
-               if [ ${?} -eq 0 ]; then
-                   gztobz2 ${LFS_PKG_DIR}/${PACK}.tgz || return 1
-                    return 0
-               else
-                    if detect_file_not_found; then
-                        # If file was not found, maybe a .tar.Z file exist... (uw-imap...)
-                       ${WGETCMD} ${URL}/${PACK}.tar.Z
-                       if [ ${?} -eq 0 ]; then
-                           gztobz2 ${LFS_PKG_DIR}/${PACK}.tar.Z || return 1
-                            return 0
-                       fi
-                   fi
-                fi
-            fi
+    for arch_ext in ${ARCH_EXT}; do
+        ${WGETCMD} ${URL}/${PACK}.${arch_ext}
+
+        # Failure: if it was a connection timeout, don't try other file extensions.
+        if grep "failed: Connection timed out" ${WGET_LOG_FILE} 1> /dev/null 2>&1; then
+            echo "Error, wget reported: Connection timed out"
+            return 1
         fi
-    fi
 
-    # Failure...
+        if detect_file_not_found; then
+            # Try the next archive extension if the server reported that the file was not found.
+            continue
+        fi
+
+        # If we arrive here, it means we were able to download the file.
+        if [ "x${arch_ext}" != "xtar.bz2" ]; then
+           gztobz2 ${LFS_PKG_DIR}/${PACK}.${arch_ext}
+        fi
+        return $?
+    done
+
+    # Failure or file not found
     return 1
 }
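
Since the default extension list is only assigned when ARCH_EXT is empty, a caller can narrow the search before invoking static_getpkg. A hedged sketch; the package name and URL are illustrative only:

    # Only look for a gzip tarball for this particular package:
    ARCH_EXT="tar.gz"
    static_getpkg foo-1.0 http://www.example.com/pub/foo
    ARCH_EXT=""   # reset so later calls use the default list again

Note that ARCH_EXT is a global, so the default list assigned on the first call also persists for subsequent calls.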
 
-
 # Get package if it is not in the repository
 # Arg. #1: Package name and version
 # Arg. #2: URL
-fpkg() {
+fpkg()
+{
     PACK=${1}
     URL=${2}
 
@@ -234,21 +250,21 @@ fpkg() {
     fi
 
     if [ ! -f ${LFS_PKG_DIR}/${PACK}.tar.bz2 ]; then
-       action_checkbox_time "Fetching ${PACK}" static_getpkg ${PACK} ${URL}
+       rcmd "Fetching ${PACK}" static_getpkg ${PACK} ${URL}
     fi
 
     # Check if a patch is available.
     static_getpatch ${PACK}
 }
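
fpkg stays the generic entry point: it fetches <package>.tar.bz2 only when it is not already in ${LFS_PKG_DIR} (converting other archive formats through static_getpkg), then looks for matching patches. An illustrative call, with a made-up package and mirror:

    fpkg "bar-2.1" "http://www.example.com/pub/bar"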
 
-
 # Get package if it is not in the repository.
 # This function is used if the source and target names for the
 # package are different (name mismatch).
 # Arg. #1: Target package name and version (on disk)
 # Arg. #2: Source package name and version (on internet)
 # Arg. #3: URL
-fpkg_mis() {
+fpkg_mis()
+{
     TARGET=${1}
     SOURCE=${2}
     URL=${3}
@@ -256,7 +272,7 @@ fpkg_mis() {
     # Checking for correct number of arguments
     if [ $# -ne 3 ]; then
        echo "${FUNCNAME}(), wrong number of arguments: ${*}"
-       return 1
+       return 1
     fi
 
     if [ ! -f ${LFS_PKG_DIR}/${TARGET}.tar.bz2 ]; then
@@ -280,11 +296,11 @@ fpkg_mis() {
     fi
 }
 
-
 # Get package if it is not in the repository
 # Arg. #1: Package name and version
 # Arg. #2: Directory name (optional)
-fpkg_gnu() {
+fpkg_gnu()
+{
     PACK=${1}
 
     if [ $# -eq 2 ]; then
@@ -302,11 +318,11 @@ fpkg_gnu() {
     fpkg ${PACK} "${GNU_URL}/${NAME}"
 }
 
-
 # Fetch Gnome package (if it is not in the repository).
 # Arg. #1: Package name and version
 # Arg. #2: Directory name (optional)
-fpkg_gnome() {
+fpkg_gnome()
+{
     PACK=${1}
 
     if [ $# -eq 2 ]; then
@@ -324,11 +340,11 @@ fpkg_gnome() {
     fpkg ${PACK} "${GNOME_URL}/${NAME}/$(get_pkg_ver_base ${PACK})"
 }
 
-
 # Get package if it is not in the repository
 # Arg. #1: Package name and version
 # Arg. #2: Directory name (optional)
-fpkg_sf() {
+fpkg_sf()
+{
     PACK=${1}
 
     if [ $# -eq 2 ]; then
@@ -346,11 +362,11 @@ fpkg_sf() {
     fpkg ${PACK} ${SOURCEFORGE_URL}/${NAME}
 }
 
-
 # Get package if it is not in the repository
 # Arg. #1: Package name and version
 # Arg. #2: Directory name (optional)
-fpkg_hv() {
+fpkg_hv()
+{
     PACK=${1}
 
     # Checking for correct number of arguments
@@ -366,11 +382,11 @@ fpkg_hv() {
     fi
 }
 
-
 # Get package if it is not in the repository
 # Arg. #1: Package name and version
 # Arg. #2: Directory name (optional)
-fpkg_lfs() {
+fpkg_lfs()
+{
     PACK=${1}
 
     # Checking for correct number of arguments
@@ -386,12 +402,12 @@ fpkg_lfs() {
     fi
 }
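
The site-specific wrappers all funnel into fpkg with the appropriate base URL. Illustrative calls only; the package names and versions below are examples, not taken from this change:

    fpkg_gnu   coreutils-8.21      # fetched from the GNU mirror
    fpkg_gnome glib-2.36.1         # fetched from the GNOME mirror
    fpkg_sf    libpng-1.5.14       # fetched from SourceForge
    fpkg_hv    hv-tools-0.1        # fetched from hugovil.com
    fpkg_lfs   somepkg-1.0         # fetched from the LFS conglomeration mirror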
 
-
 # Create a symbolic link to a package that is located in another stage
 # repository (to avoid having the same file twice).
 # Arg. #1: Source stage number (1, 2, 3, etc)
 # Arg. #2: Package name
-lpkg() {
+lpkg()
+{
     SRCSTAGE="stage${1}"
     FILE="${2}.tar.bz2"
     PACKAGE_NAME="${2}"
@@ -403,15 +419,15 @@ lpkg() {
     fi
 
     if [ ! -h ${LFS_PKG_DIR}/${FILE} ]; then
-       # The link does not exist. First check if source file exist.
+       # Issue a warning if source file doesn't exist.
        if [ ! -f ${LFS_PKG_BASE}/${SRCSTAGE}/${FILE} ]; then
-           echo "${FUNCNAME} ${*}"
-           echo "Missing source file..."
-           exit 1
+            source_link_status=" (missing source file)"
+        else
+            source_link_status=""
        fi
 
        # Create link if it doesn't exist
-       ln -s ../${SRCSTAGE}/${FILE} ${LFS_PKG_DIR}/${FILE}
+       rcmd "Linking ${PACKAGE_NAME}${source_link_status}" ln -s ../${SRCSTAGE}/${FILE} ${LFS_PKG_DIR}/${FILE}
     fi
 
     # Create link for patches corresponding to that package:
@@ -420,7 +436,7 @@ lpkg() {
            PATCHFILE=$(basename ${patch})
            if [ ! -h ${LFS_PKG_DIR}/${PATCHFILE} ]; then
                # Create link if it doesn't exist
-               ln -s ../${SRCSTAGE}/${PATCHFILE} ${LFS_PKG_DIR}/${PATCHFILE}
+               rcmd "Linking ${PATCHFILE}" ln -s ../${SRCSTAGE}/${PATCHFILE} ${LFS_PKG_DIR}/${PATCHFILE}
            fi
        done
     fi
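
With the relaxed check in lpkg, a later stage can now link a tarball (and its patches) from an earlier stage before that file has actually been downloaded; the run reports "(missing source file)" instead of aborting. A hypothetical stage2 example, with an illustrative package name:

    # In the stage2 package list:
    lpkg 1 foo-1.0
    # Creates packages/stage2/foo-1.0.tar.bz2 -> ../stage1/foo-1.0.tar.bz2
    # and merely warns if packages/stage1/foo-1.0.tar.bz2 does not exist yet.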