diff --git a/functions-update b/functions-update
index 4c67529..916f239 100644
--- a/functions-update
+++ b/functions-update
@@ -8,12 +8,16 @@ WGET_LOG_FILE="${LFS_LOG_DIR}/pkg-wget.log"
 
 # URL
 LFS_BASE_URL="http://www.linuxfromscratch.org"
+CLFS_BASE_URL="http://svn.cross-lfs.org/svn/repos/cross-lfs/trunk"
+HV_BASE_URL="http://www.hugovil.com/hvlinux-repository"
+
 LFS_PATCHES_URL="${LFS_BASE_URL}/patches/lfs/development"
-LFS_PACKAGES_URL="ftp://ftp.osuosl.org/pub/lfs/lfs-packages/conglomeration"
 BLFS_PATCHES_URL="${LFS_BASE_URL}/patches/blfs/svn"
-HV_BASE_URL="http://www.hugovil.com/hvlinux-repository"
-HV_PACKAGES_URL="${HV_BASE_URL}/packages"
+CLFS_PATCHES_URL="http://patches.cross-lfs.org/dev"
 HV_PATCHES_URL="${HV_BASE_URL}/patches"
+
+LFS_PACKAGES_URL="ftp://ftp.osuosl.org/pub/lfs/lfs-packages/conglomeration"
+HV_PACKAGES_URL="${HV_BASE_URL}/packages"
 SOURCEFORGE_URL="http://internap.dl.sourceforge.net/sourceforge"
 GNOME_URL="http://ftp.gnome.org/pub/gnome/sources"
 GNU_URL="http://ftp.gnu.org/pub/gnu"
@@ -23,6 +27,7 @@ WGETCMD="wget --directory-prefix=${LFS_PKG_DIR} --timeout=5 --tries=3 -nc -c -o
 
 LFS_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-lfs.html
 BLFS_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-blfs.html
+CLFS_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-clfs.html
 HV_PATCHES_LIST=${LFS_PKG_DIR}/patches-list-hv.html
 
 # Arg. #1: URL for patches repository.
@@ -50,15 +55,25 @@ update_packages_init()
         install -v -m755 -d ${LFS_PKG_DIR} 1> ${LFS_LOG_FILE} 2>&1 || exit 1
     fi
 
-    # Getting list of all patches from LFS server.
-    ##action_checkbox "Fetching LFS patches list"
-    static_fetch_patches_list ${LFS_PATCHES_URL} ${LFS_PATCHES_LIST}
+    if [ -n "${USE_LFS_PATCHES}" ]; then
+        # Getting list of all patches from LFS server.
+        rcmd "Fetching LFS patches list" static_fetch_patches_list ${LFS_PATCHES_URL} ${LFS_PATCHES_LIST}
+    fi
 
-    # Getting list of all patches from BLFS server.
-    action_checkbox "Fetching BLFS patches list" static_fetch_patches_list ${BLFS_PATCHES_URL} ${BLFS_PATCHES_LIST}
+    if [ -n "${USE_BLFS_PATCHES}" ]; then
+        # Getting list of all patches from BLFS server.
+        rcmd "Fetching BLFS patches list" static_fetch_patches_list ${BLFS_PATCHES_URL} ${BLFS_PATCHES_LIST}
+    fi
 
-    # Getting list of all patches from hugovil.com server.
-    action_checkbox "Fetching hugovil.com patches list" static_fetch_patches_list ${HV_PATCHES_URL} ${HV_PATCHES_LIST}
+    if [ -n "${USE_CLFS_PATCHES}" ]; then
+        # Getting list of all patches from CLFS server.
+        rcmd "Fetching CLFS patches list" static_fetch_patches_list ${CLFS_PATCHES_URL} ${CLFS_PATCHES_LIST}
+    fi
+
+    if [ -n "${USE_HV_PATCHES}" ]; then
+        # Getting list of all patches from hugovil.com server.
+        rcmd "Fetching hugovil.com patches list" static_fetch_patches_list ${HV_PATCHES_URL} ${HV_PATCHES_LIST}
+    fi
 }
 
 # Get patch package if it is not in the repository
@@ -75,7 +90,7 @@ static_checkpatch()
     if [ -n "${PATCHES_FOUND}" ]; then
         for p in ${PATCHES_FOUND}; do
             if [ ! -f ${LFS_PKG_DIR}/${p} ]; then
-                action_checkbox_time "Fetching ${p}" ${WGETCMD} ${PATCHES_URL}/${p}
+                rcmd "Fetching ${p} from ${PATCHES_URL}" ${WGETCMD} ${PATCHES_URL}/${p}
             fi
         done
     fi
@@ -93,14 +108,25 @@ static_getpatch()
         return 1
     fi
 
-    # Checking if patch is available from LFS.
-    static_checkpatch ${PACK} ${LFS_PATCHES_LIST} ${LFS_PATCHES_URL}
+    if [ -n "${USE_LFS_PATCHES}" ]; then
+        # Checking if patch is available from LFS.
+        static_checkpatch ${PACK} ${LFS_PATCHES_LIST} ${LFS_PATCHES_URL}
+    fi
+
+    if [ -n "${USE_BLFS_PATCHES}" ]; then
+        # Checking if patch is available from BLFS.
+        static_checkpatch ${PACK} ${BLFS_PATCHES_LIST} ${BLFS_PATCHES_URL}
+    fi
 
-    # Checking if patch is available from BLFS.
-    static_checkpatch ${PACK} ${BLFS_PATCHES_LIST} ${BLFS_PATCHES_URL}
+    if [ -n "${USE_CLFS_PATCHES}" ]; then
+        # Checking if patch is available from CLFS.
+        static_checkpatch ${PACK} ${CLFS_PATCHES_LIST} ${CLFS_PATCHES_URL}
+    fi
 
-    # Checking if patch is available from hugovil.com.
-    static_checkpatch ${PACK} ${HV_PATCHES_LIST} ${HV_PATCHES_URL}
+    if [ -n "${USE_HV_PATCHES}" ]; then
+        # Checking if patch is available from hugovil.com.
+        static_checkpatch ${PACK} ${HV_PATCHES_LIST} ${HV_PATCHES_URL}
+    fi
 }
 
 # Convert multiple compressed gzip files to bzip2.
@@ -155,7 +181,7 @@ detect_file_not_found()
 {
     # HTTP: will return error code 404.
     # FTP: will say "File not found"
-    if grep "404 Not Found" ${WGET_LOG_FILE} 1> /dev/null 2>&1; then
+    if grep "404" ${WGET_LOG_FILE} 1> /dev/null 2>&1; then
         return 0
     elif grep "No such file" ${WGET_LOG_FILE} 1> /dev/null 2>&1; then
         return 0
@@ -178,42 +205,33 @@ static_getpkg()
         return 1
     fi
 
-    ${WGETCMD} ${URL}/${PACK}.tar.bz2 && return 0
-
-    # Failure: if it was a connection timeout, don't try for other file extensions.
-    if grep "failed: Connection timed out" ${WGET_LOG_FILE} 1> /dev/null 2>&1; then
-        echo "Error, wget reported: Connection timed out"
-        return 1
+    if [ -z "${ARCH_EXT}" ]; then
+        # List of default archive extensions to try.
+        ARCH_EXT="tar.bz2 tar.gz tgz tar.Z"
     fi
 
-    if detect_file_not_found; then
-        # If file was not found, maybe a .tar.gz file exist...
-        ${WGETCMD} ${URL}/${PACK}.tar.gz
-        if [ ${?} -eq 0 ]; then
-            gztobz2 ${LFS_PKG_DIR}/${PACK}.tar.gz || return 1
-            return 0
-        else
-            if detect_file_not_found; then
-                # If file was not found, maybe a .tgz file exist...
-                ${WGETCMD} ${URL}/${PACK}.tgz
-                if [ ${?} -eq 0 ]; then
-                    gztobz2 ${LFS_PKG_DIR}/${PACK}.tgz || return 1
-                    return 0
-                else
-                    if detect_file_not_found; then
-                        # If file was not found, maybe a .tar.Z file exist... (uw-imap...)
-                        ${WGETCMD} ${URL}/${PACK}.tar.Z
-                        if [ ${?} -eq 0 ]; then
-                            gztobz2 ${LFS_PKG_DIR}/${PACK}.tar.Z || return 1
-                            return 0
-                        fi
-                    fi
-                fi
-            fi
-        fi
-    fi
-
-    # Failure...
+    for arch_ext in ${ARCH_EXT}; do
+        ${WGETCMD} ${URL}/${PACK}.${arch_ext}
+
+        # Failure: if it was a connection timeout, don't try other file extensions.
+        if grep "failed: Connection timed out" ${WGET_LOG_FILE} 1> /dev/null 2>&1; then
+            echo "Error, wget reported: Connection timed out"
+            return 1
+        fi
+
+        if detect_file_not_found; then
+            # Try the next archive extension if the server reported the file as not found.
+            continue
+        fi
+
+        # If we arrive here, the file was downloaded. Convert it to bzip2
+        # unless it is already a tar.bz2 archive.
+        if [ "x${arch_ext}" != "xtar.bz2" ]; then
+            gztobz2 ${LFS_PKG_DIR}/${PACK}.${arch_ext} || return 1
+        fi
+        return 0
+    done
+
+    # Failure or file not found.
     return 1
 }
@@ -232,7 +250,7 @@ fpkg()
     fi
 
     if [ ! -f ${LFS_PKG_DIR}/${PACK}.tar.bz2 ]; then
-        action_checkbox_time "Fetching ${PACK}" static_getpkg ${PACK} ${URL}
+        rcmd "Fetching ${PACK}" static_getpkg ${PACK} ${URL}
    fi
 
     # Check if a patch is available.
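Note on static_getpkg() above: the old version chained nested detect_file_not_found checks, one per archive extension, while the new version walks a single ARCH_EXT list and falls through to the next candidate on a "not found" response, so supporting a new extension is a one-word change. A minimal standalone sketch of the same fallback pattern, assuming plain wget; fetch_package, its stubs, and the example URL layout are illustrative only and not part of functions-update:

#!/bin/sh
# Sketch: try a list of archive extensions until one download succeeds.
# Only the control flow mirrors the patched static_getpkg(); the
# function name fetch_package is hypothetical.

ARCH_EXT="tar.bz2 tar.gz tgz tar.Z"

fetch_package()
{
    pack=$1
    url=$2

    for ext in ${ARCH_EXT}; do
        # wget -q: quiet mode; non-zero exit status on any failure.
        if wget -q "${url}/${pack}.${ext}"; then
            echo "Downloaded ${pack}.${ext}"
            return 0
        fi
        # Unlike the real function, this sketch does not distinguish a
        # 404 from a timeout; static_getpkg() greps the wget log and
        # aborts early on timeouts instead of trying the next extension.
    done

    # No candidate extension matched.
    return 1
}

A hypothetical invocation would be: fetch_package gawk-3.1.5 http://ftp.gnu.org/pub/gnu/gawk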
@@ -254,7 +272,7 @@ fpkg_mis()
     # Checking for correct number of arguments
     if [ $# -ne 3 ]; then
         echo "${FUNCNAME}(), wrong number of arguments: ${*}"
-        return 1
+        return 1
     fi
 
     if [ ! -f ${LFS_PKG_DIR}/${TARGET}.tar.bz2 ]; then
@@ -401,15 +419,15 @@ lpkg()
     fi
 
     if [ ! -h ${LFS_PKG_DIR}/${FILE} ]; then
-        # The link does not exist. First check if source file exist.
+        # Issue a warning if the source file doesn't exist.
         if [ ! -f ${LFS_PKG_BASE}/${SRCSTAGE}/${FILE} ]; then
-            echo "${FUNCNAME} ${*}"
-            echo "Missing source file..."
-            exit 1
+            source_link_status=" (missing source file)"
+        else
+            source_link_status=""
         fi
 
         # Create link if it doesn't exist
-        ln -s ../${SRCSTAGE}/${FILE} ${LFS_PKG_DIR}/${FILE}
+        rcmd "Linking ${PACKAGE_NAME}${source_link_status}" ln -s ../${SRCSTAGE}/${FILE} ${LFS_PKG_DIR}/${FILE}
     fi
 
     # Create link for patches corresponding to that package:
@@ -418,7 +436,7 @@ lpkg()
             PATCHFILE=$(basename ${patch})
             if [ ! -h ${LFS_PKG_DIR}/${PATCHFILE} ]; then
                 # Create link if it doesn't exist
-                ln -s ../${SRCSTAGE}/${PATCHFILE} ${LFS_PKG_DIR}/${PATCHFILE}
+                rcmd "Linking ${PATCHFILE}" ln -s ../${SRCSTAGE}/${PATCHFILE} ${LFS_PKG_DIR}/${PATCHFILE}
             fi
         done
     fi
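For reference, the USE_*_PATCHES switches introduced throughout this patch are ordinary shell variables tested with [ -n ... ]: any non-empty value enables the corresponding patch-list fetch in update_packages_init() and the matching lookup in static_getpatch(), while an unset or empty variable skips that repository entirely. A hedged configuration sketch; the idea of a sourced config file is an assumption, only the variable names come from the patch above:

# Hypothetical snippet from a configuration file sourced before
# functions-update. Non-empty enables a repository, empty or unset
# disables it (the script tests with [ -n "${VAR}" ]).
USE_LFS_PATCHES=1
USE_BLFS_PATCHES=1
USE_CLFS_PATCHES=""   # cross-lfs lookups disabled
USE_HV_PATCHES=1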