# URL
LFS_BASE_URL="http://www.linuxfromscratch.org"
+CLFS_BASE_URL="http://svn.cross-lfs.org/svn/repos/cross-lfs/trunk"
+HV_BASE_URL="http://www.hugovil.com/hvlinux-repository"
+
LFS_PATCHES_URL="${LFS_BASE_URL}/patches/lfs/development"
-LFS_PACKAGES_URL="ftp://ftp.osuosl.org/pub/lfs/lfs-packages/conglomeration"
BLFS_PATCHES_URL="${LFS_BASE_URL}/patches/blfs/svn"
-HV_BASE_URL="http://www.hugovil.com/hvlinux-repository"
-HV_PACKAGES_URL="${HV_BASE_URL}/packages"
+CLFS_PATCHES_URL="http://patches.cross-lfs.org/dev"
HV_PATCHES_URL="${HV_BASE_URL}/patches"
-SOURCEFORGE_URL="http://internap.dl.sourceforge.net/sourceforge"
+
+LFS_PACKAGES_URL="ftp://ftp.osuosl.org/pub/lfs/lfs-packages/conglomeration"
+HV_PACKAGES_URL="${HV_BASE_URL}/packages"
+SOURCEFORGE_URL="http://cdnetworks-us-1.dl.sourceforge.net"
GNOME_URL="http://ftp.gnome.org/pub/gnome/sources"
GNU_URL="http://ftp.gnu.org/pub/gnu"
TETEX_URL="http://www.tug.org/ftp/tex-archive/systems/unix/teTeX"
LFS_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-lfs.html
BLFS_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-blfs.html
+CLFS_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-clfs.html
HV_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-hv.html
# Arg. #1: URL for patches repository.
# slash is absolutely necessary for this to work.
update_packages_init()
{
+ # Execute a return statement instead of exit, to be able to continue with
+ # downloading other packages
+ export RCMD_NO_EXIT=1
+
# First create log directory if it does not exists.
if [ ! -d ${LFS_LOG_DIR} ]; then
install -m755 -d ${LFS_LOG_DIR} || exit 1
install -v -m755 -d ${LFS_PKG_DIR} 1> ${LFS_LOG_FILE} 2>&1 || exit 1
fi
- # Getting list of all patches from LFS server.
- ##action_checkbox "Fetching LFS patches list"
- static_fetch_patches_list ${LFS_PATCHES_URL} ${LFS_PATCHES_LIST}
+ if [ -n "${USE_LFS_PATCHES}" ]; then
+ # Getting list of all patches from LFS server.
+ rcmd "Fetching LFS patches list" static_fetch_patches_list ${LFS_PATCHES_URL} ${LFS_PATCHES_LIST}
+ fi
+
+ if [ -n "${USE_BLFS_PATCHES}" ]; then
+ # Getting list of all patches from BLFS server.
+ rcmd "Fetching BLFS patches list" static_fetch_patches_list ${BLFS_PATCHES_URL} ${BLFS_PATCHES_LIST}
+ fi
- # Getting list of all patches from BLFS server.
- action_checkbox "Fetching BLFS patches list" static_fetch_patches_list ${BLFS_PATCHES_URL} ${BLFS_PATCHES_LIST}
+ if [ -n "${USE_CLFS_PATCHES}" ]; then
+ # Getting list of all patches from CLFS server.
+ rcmd "Fetching CLFS patches list" static_fetch_patches_list ${CLFS_PATCHES_URL} ${CLFS_PATCHES_LIST}
+ fi
- # Getting list of all patches from hugovil.com server.
- action_checkbox "Fetching hugovil.com patches list" static_fetch_patches_list ${HV_PATCHES_URL} ${HV_PATCHES_LIST}
+ if [ -n "${USE_HV_PATCHES}" ]; then
+ # Getting list of all patches from hugovil.com server.
+ rcmd "Fetching hugovil.com patches list" static_fetch_patches_list ${HV_PATCHES_URL} ${HV_PATCHES_LIST}
+ fi
}
# Get patch package if it is not in the repository
if [ -n "${PATCHES_FOUND}" ]; then
for p in ${PATCHES_FOUND}; do
if [ ! -f ${LFS_PKG_DIR}/${p} ]; then
- action_checkbox_time "Fetching ${p}" ${WGETCMD} ${PATCHES_URL}/${p}
+ rcmd "Fetching ${p} from ${PATCHES_URL}" ${WGETCMD} ${PATCHES_URL}/${p}
fi
done
fi
return 1
fi
- # Checking if patch is available from LFS.
- static_checkpatch ${PACK} ${LFS_PATCHES_LIST} ${LFS_PATCHES_URL}
+ if [ -n "${USE_LFS_PATCHES}" ]; then
+ # Checking if patch is available from LFS.
+ static_checkpatch ${PACK} ${LFS_PATCHES_LIST} ${LFS_PATCHES_URL}
+ fi
+
+ if [ -n "${USE_BLFS_PATCHES}" ]; then
+ # Checking if patch is available from BLFS.
+ static_checkpatch ${PACK} ${BLFS_PATCHES_LIST} ${BLFS_PATCHES_URL}
+ fi
- # Checking if patch is available from BLFS.
- static_checkpatch ${PACK} ${BLFS_PATCHES_LIST} ${BLFS_PATCHES_URL}
+ if [ -n "${USE_CLFS_PATCHES}" ]; then
+ # Checking if patch is available from CLFS.
+ static_checkpatch ${PACK} ${CLFS_PATCHES_LIST} ${CLFS_PATCHES_URL}
+ fi
- # Checking if patch is available from hugovil.com.
- static_checkpatch ${PACK} ${HV_PATCHES_LIST} ${HV_PATCHES_URL}
+ if [ -n "${USE_HV_PATCHES}" ]; then
+ # Checking if patch is available from hugovil.com.
+ static_checkpatch ${PACK} ${HV_PATCHES_LIST} ${HV_PATCHES_URL}
+ fi
}
# Convert multiple compressed gzip files to bzip2.
# HTTP: will return error code 404.
# FTP: will say "File not found"
if grep "404" ${WGET_LOG_FILE} 1> /dev/null 2>&1; then
+ echo NOTFOUND
return 0
elif grep "No such file" ${WGET_LOG_FILE} 1> /dev/null 2>&1; then
return 0
# Get package if it is not in the repository
# Arg. #1: Package name and version
# Arg. #2: URL
+# Arg. #3: Optional extension
static_getpkg()
{
- PACK=${1}
- URL=${2}
+ local PACK=${1}
+ local URL=${2}
+ local MY_ARCH_EXT=${3}
# Checking for correct number of arguments
- if [ $# -ne 2 ]; then
+ if [ $# -ne 2 -a $# -ne 3 ]; then
echo "${FUNCNAME}(), wrong number of arguments: ${*}"
return 1
fi
- ${WGETCMD} ${URL}/${PACK}.tar.bz2 && return 0
-
- # Failure: if it was a connection timeout, don't try for other file extensions.
- if grep "failed: Connection timed out" ${WGET_LOG_FILE} 1> /dev/null 2>&1; then
- echo "Error, wget reported: Connection timed out"
- return 1
+ # Use the default list only when the caller did not request a specific
+ # extension (test the local copy of arg. #3, not the caller's global).
+ if [ -z "${MY_ARCH_EXT}" ]; then
+ # List of default archive extensions to try
+ MY_ARCH_EXT="tar.bz2 tar.gz tgz tar.Z"
fi
- if detect_file_not_found; then
- # If file was not found, maybe a .tar.gz file exist...
- ${WGETCMD} ${URL}/${PACK}.tar.gz
-
- if [ ${?} -eq 0 ]; then
- gztobz2 ${LFS_PKG_DIR}/${PACK}.tar.gz || return 1
- return 0
- else
- if detect_file_not_found; then
- # If file was not found, maybe a .tgz file exist...
- ${WGETCMD} ${URL}/${PACK}.tgz
- if [ ${?} -eq 0 ]; then
- gztobz2 ${LFS_PKG_DIR}/${PACK}.tgz || return 1
- return 0
- else
- if detect_file_not_found; then
- # If file was not found, maybe a .tar.Z file exist... (uw-imap...)
- ${WGETCMD} ${URL}/${PACK}.tar.Z
- if [ ${?} -eq 0 ]; then
- gztobz2 ${LFS_PKG_DIR}/${PACK}.tar.Z || return 1
- return 0
- fi
- fi
- fi
- fi
+
+ for arch_ext in ${MY_ARCH_EXT}; do
+ ${WGETCMD} ${URL}/${PACK}.${arch_ext}
+
+ # Failure: if it was a connection timeout, don't try for other file extensions.
+ if grep "failed: Connection timed out" ${WGET_LOG_FILE} 1> /dev/null 2>&1; then
+ echo "Error, wget reported: Connection timed out"
+ return 1
+ fi
+
+ if detect_file_not_found; then
+ # Try next archive extension if web server reported that file is not found.
+ continue
fi
- fi
- # Failure...
+ # If we arrive here, it means we were able to download the file.
+ # Repository convention is bzip2, so recompress gzip/compress archives.
+ if [ "x${arch_ext}" = "xtar.gz" -o "x${arch_ext}" = "xtgz" -o "x${arch_ext}" = "xtar.Z" ]; then
+ gztobz2 ${LFS_PKG_DIR}/${PACK}.${arch_ext}
+ fi
+ return $?
+ done
+
+ # Failure or file not found
return 1
}
return 1
fi
- if [ ! -f ${LFS_PKG_DIR}/${PACK}.tar.bz2 ]; then
- action_checkbox_time "Fetching ${PACK}" static_getpkg ${PACK} ${URL}
+ # Default extension
+ local FINAL_EXT=tar.bz2
+
+ local FETCH_STRING="Fetching ${PACK}"
+
+ if [ -n "${ARCH_EXT}" ]; then
+ FINAL_EXT=${ARCH_EXT}
+ FETCH_STRING="${FETCH_STRING}.${ARCH_EXT}"
+ fi
+
+ # Default destination directory
+ local MY_FETCH_DIR=${LFS_PKG_DIR}
+
+ if [ -n "${FETCH_DIR}" ]; then
+ local MY_FETCH_DIR=${FETCH_DIR}
+ local MOVE_FILE=1
+ fi
+
+ DEST_FILE=${MY_FETCH_DIR}/${PACK}.${FINAL_EXT}
+
+ ##########echo "DEST_FILE=${DEST_FILE}"
+ if [ ! -f ${DEST_FILE} ]; then
+ rcmd "${FETCH_STRING}" static_getpkg ${PACK} ${URL} ${ARCH_EXT}
+
+ # Move to final directory if alternate directory specified
+ if [ -f ${LFS_PKG_DIR}/${PACK}.${FINAL_EXT} -a -n "${MOVE_FILE}" ]; then
+ mv ${LFS_PKG_DIR}/${PACK}.${FINAL_EXT} ${DEST_FILE}
+ fi
fi
# Check if a patch is available.
# Checking for correct number of arguments
if [ $# -ne 3 ]; then
echo "${FUNCNAME}(), wrong number of arguments: ${*}"
- return 1
+ return 1
+ fi
+
+ if [ -z "${ARCH_EXT}" ]; then
+ # Default extension
+ FINAL_EXT=tar.bz2
+ else
+ FINAL_EXT=${ARCH_EXT}
fi
- if [ ! -f ${LFS_PKG_DIR}/${TARGET}.tar.bz2 ]; then
+ if [ ! -f ${LFS_PKG_DIR}/${TARGET}.${FINAL_EXT} ]; then
fpkg ${SOURCE} ${URL}
- if [ -f ${LFS_PKG_DIR}/${SOURCE}.tar.bz2 ]; then
- mv ${LFS_PKG_DIR}/${SOURCE}.tar.bz2 ${LFS_PKG_DIR}/${TARGET}.tar.bz2
+ if [ -f ${LFS_PKG_DIR}/${SOURCE}.${FINAL_EXT} ]; then
+ mv ${LFS_PKG_DIR}/${SOURCE}.${FINAL_EXT} ${LFS_PKG_DIR}/${TARGET}.${FINAL_EXT}
fi
fi
fi
if [ ! -h ${LFS_PKG_DIR}/${FILE} ]; then
- # The link does not exist. First check if source file exist.
- #if [ ! -f ${LFS_PKG_BASE}/${SRCSTAGE}/${FILE} ]; then
- # echo "${FUNCNAME} ${*}"
- # echo "Missing source file..."
- # return 1
- #fi
+ # Issue a warning if source file doesn't exist.
+ if [ ! -f ${LFS_PKG_BASE}/${SRCSTAGE}/${FILE} ]; then
+ source_link_status=" (missing source file)"
+ else
+ source_link_status=""
+ fi
# Create link if it doesn't exist
- action_checkbox_time "Linking ${PACKAGE_NAME}" ln -s ../${SRCSTAGE}/${FILE} ${LFS_PKG_DIR}/${FILE}
+ rcmd "Linking ${PACKAGE_NAME}${source_link_status}" ln -s ../${SRCSTAGE}/${FILE} ${LFS_PKG_DIR}/${FILE}
fi
# Create link for patches corresponding to that package:
PATCHFILE=$(basename ${patch})
if [ ! -h ${LFS_PKG_DIR}/${PATCHFILE} ]; then
# Create link if it doesn't exist
- action_checkbox_time "Linking ${PACKAGE_NAME}" ln -s ../${SRCSTAGE}/${PATCHFILE} ${LFS_PKG_DIR}/${PATCHFILE}
+ rcmd "Linking ${PATCHFILE}" ln -s ../${SRCSTAGE}/${PATCHFILE} ${LFS_PKG_DIR}/${PATCHFILE}
fi
done
fi