#!/bin/bash
# Package-fetching helper library: download packages and patches for an
# LFS/BLFS build, with archive-type detection and validity checks.

# Shared URL definitions and the gzip-to-bzip2 conversion helper.
source ../config/urls
source ../functions/gztobz2

# Ownership of downloaded files
FPKG_USER="root"
FPKG_GROUP="users"

# Test if the given file extension corresponds to a compressed archive.
# Arg. #1: File extension (without leading dot, e.g. "gz", "zip")
# Return: 0 if it is a known archive extension, 1 otherwise (or on bad usage).
is_extension_archive() {
    if [ ${#} -ne 1 ]; then
        echo "${FUNCNAME}(), wrong number of arguments: ${*}"
        return 1
    fi

    local EXT=${1}
    local k

    for k in "gz" "tgz" "Z" "zip" "xz" "bz2"; do
        # Quoted comparison is safe even for empty values.
        if [ "${EXT}" = "${k}" ]; then
            return 0
        fi
    done

    return 1
}

# Test if the given file extension corresponds to a tar/gzip archive.
# Arg. #1: File extension (e.g. "tar.gz", "tgz", "tar.Z")
# Return: 0 if it is a tar/gzip-style extension, 1 otherwise (or on bad usage).
is_extension_tar_gzip() {
    if [ ${#} -ne 1 ]; then
        echo "${FUNCNAME}(), wrong number of arguments: ${*}"
        return 1
    fi

    local EXT=${1}
    local k

    for k in "tar.gz" "tgz" "tar.Z"; do
        if [ "${EXT}" = "${k}" ]; then
            return 0
        fi
    done

    return 1
}

# Test if the given file is an archive file, by inspecting its content with
# file(1) rather than trusting the filename.
# Arg. #1: File name
# Return: 0 if file(1) reports compressed or Zip data, 1 otherwise.
is_archive() {
    if [ ${#} -ne 1 ]; then
        echo "${FUNCNAME}(), wrong number of arguments: ${*}"
        return 1
    fi

    local F=${1}

    # Quoting protects filenames containing spaces.
    if file "${F}" | grep -q --ignore-case -e "compressed data" -e "Zip archive data"; then
        return 0;
    else
        return 1
    fi
}

# Remove any partially downloaded file.
# Arg. #1: Partial file name without partial extension (.part).
remove_partial_file() {
    rm -f -- "${1}.part"
}

# Download a file, making sure it is valid (at least its header!)
# Arg. #1: Source URL.
# Arg. #2: Source filename (on server)
# Arg. #3: Output directory (optional).
Default = LFS_PKG_DIR # # Return: 0 : success # 1 : wget error # 2 : File not found or not an archive (if file extension was archive type) wget_wrapper() { if [ ${#} -lt 2 ]; then echo "${FUNCNAME}(), wrong number of arguments: ${*}" return 1 fi local URL=${1} local SOURCE=${2} local DESTDIR=${LFS_PKG_DIR} local wget_status if [ ${#} -eq 3 ]; then DESTDIR=${3} fi remove_partial_file ${DESTDIR}/${SOURCE} local STRING="Fetching ${SOURCE}" if [ "x${FTYPE}" = "x${FTYPE_PATCH}" ]; then STRING="${STRING} from ${URL}" fi rcmd "${STRING}" ${WGETCMD} ${URL}/${SOURCE} \ --output-document=${DESTDIR}/${SOURCE}.part wget_status=$? chown ${FPKG_USER}:${FPKG_GROUP} ${DESTDIR}/${SOURCE}.part chmod 664 ${DESTDIR}/${SOURCE}.part # Append log to global log file cat ${WGET_LOG_FILE} >> ${LFS_LOG_FILE} # Total failure if it was a connection timeout. if grep -q "failed: Connection timed out" ${WGET_LOG_FILE}; then echo "Error, wget reported: Connection timed out" return 1 fi # Partial failure if file was not found. if detect_file_not_found; then remove_partial_file ${DESTDIR}/${SOURCE} return 2 fi if [ ${wget_status} -ne 0 ]; then echo "Error: wget returned error status ${wget_status}" >> \ ${LFS_LOG_FILE} remove_partial_file ${DESTDIR}/${SOURCE} return 1 fi local FEXT=${SOURCE##*.} if is_extension_archive "${FEXT}"; then # Just to be sure, test if downloaded file is really an archive: if ! is_archive ${DESTDIR}/${SOURCE}.part; then # Partial failure if file is invalid. echo "Error: failed archive test" >> ${LFS_LOG_FILE} remove_partial_file ${DESTDIR}/${SOURCE} return 2 fi fi # Rename temporary file to final name mv ${DESTDIR}/${SOURCE}{.part,} chown ${FPKG_USER}:${FPKG_GROUP} ${DESTDIR}/${SOURCE} chmod 664 ${DESTDIR}/${SOURCE} return 0 } # Arg. #1: URL for patches repository. # Arg. #2: Destination filename. 
# Download the HTML directory index of a patches repository and save it
# under the given filename (used later to grep for available patches).
static_fetch_patches_list() {
    local wget_status

    # NOTE(review): intentionally not 'local' in the original — kept as-is.
    PATCHES_URL=${1}
    PATCHES_LIST_FILENAME=${2}

    # Appending a slash (/) will download the directory content as a file named
    # index.html
    ${WGETCMD} "${PATCHES_URL}/"
    wget_status=$?

    # Append log to global log file
    cat ${WGET_LOG_FILE} >> ${LFS_LOG_FILE}

    if [ ${wget_status} -eq 0 ]; then
        mv ${LFS_PKG_DIR}/index.html ${PATCHES_LIST_FILENAME}
    else
        return 1
    fi
}

# Initialize the package-update environment: exported paths, the wget command
# line, destination directories, and the cached patches lists.
# Arg. #1: If "test" is specified, set TEST_INTEGRITY to 1
update_packages_init() {
    if [ ${#} -eq 1 -a "x${1}" = "xtest" ]; then
        export TEST_INTEGRITY=1
    fi

    # Execute a return statement instead of exit, to be able to continue with
    # downloading other packages
    export RCMD_NO_EXIT=1

    # External helper (defined elsewhere) — presumably rotates/creates the
    # update log file; TODO confirm.
    init_log_file_update

    export LFS_PATCHES_LIST=${LFS_PKG_DIR}/patches/list-lfs.html
    export BLFS_PATCHES_LIST=${LFS_PKG_DIR}/patches/list-blfs.html
    export CLFS_PATCHES_LIST=${LFS_PKG_DIR}/patches/list-clfs.html

    export WGET_LOG_FILE="${LFS_LOG_DIR}/pkg-wget.log"

    export WGETCMD="wget --directory-prefix=${LFS_PKG_DIR} --timeout=15 --tries=3 -nc --continue --no-check-certificate --no-verbose --output-file=${WGET_LOG_FILE}"

    # Then create destination directory if it does not exists.
    if [ ! -d ${LFS_PKG_DIR} ]; then
        install -v -m775 -o ${FPKG_USER} -g ${FPKG_GROUP} \
            -d ${LFS_PKG_DIR} 1> ${LFS_LOG_FILE} 2>&1 || exit 1
    fi

    # Create patches destination directory if it does not exists.
    if [ ! -d ${LFS_PATCHES_DIR} ]; then
        install -v -m775 -o ${FPKG_USER} -g ${FPKG_GROUP} \
            -d ${LFS_PATCHES_DIR} 1> ${LFS_LOG_FILE} 2>&1 || exit 1
    fi

    if [ -n "${TEST_INTEGRITY}" ]; then
        # In integrity-test mode, we do not want to download anything.
        return 0
    fi

    if [ ! -x /usr/bin/wget ]; then
        echo "${FUNCNAME}() error, the wget package was not found."
        return 1
    fi

    # Temporary deactivate error checking.
    set +e

    # Getting list of all patches from LFS server.
    rcmd "Fetching LFS patches list" static_fetch_patches_list \
        ${LFS_PATCHES_URL} ${LFS_PATCHES_LIST}

    # Getting list of all patches from BLFS server.
    rcmd "Fetching BLFS patches list" static_fetch_patches_list \
        ${BLFS_PATCHES_URL} ${BLFS_PATCHES_LIST}

    # Getting list of all patches from CLFS server.
    rcmd "Fetching CLFS patches list" static_fetch_patches_list \
        ${CLFS_PATCHES_URL} ${CLFS_PATCHES_LIST}

    # Reactivate error checking.
    set -e
}

# Get patch for package if it is not in the repository
# Arg. #1: Package name and version
# Arg. #2: Patches list file (HTML)
# Arg. #3: Patches URL
static_checkpatch() {
    local PACK=${1}
    local PATCHES_LIST=${2}
    local PATCHES_URL=${3}

    # Make sure patch list file exists
    if [ ! -f ${PATCHES_LIST} ]; then
        return
    fi

    # Replace "+" with "%2B" (URL-encoding, e.g. for gtk+ style names).
    local PACK_URL=$(echo $PACK | sed s!\+!%2B!g)

    # Patches list formats (patches-list-*.html):
    #   LFS:     a href="name.patch"
    #   hugovil: a href="dir/subdir/subdir/name.patch"
    # We must search for a patch beginning with either a slash or a " to avoid
    # the possibility of having another package name within a patch name:
    #   if patch = Mesalib-8.0.4-llvm-3.1-fixes-1.patch
    #   then we could erroneously try to download patch "llvm-3.1-fixes-1.patch"
    local PATCHES_FOUND=$(cat ${PATCHES_LIST} | \
        egrep "\"${PACK_URL}-|/${PACK_URL}-" | \
        egrep ".patch\"" | \
        sed "s/.*\(${PACK_URL}-.*\.patch\)\".*/\1/")

    if [ -n "${PATCHES_FOUND}" ]; then
        for p in ${PATCHES_FOUND}; do
            # Replace "%2B" back with "+" to get the on-disk patch name.
            PATCH_NAME=$(echo ${p} | sed s!%2B!\+!g)
            # Only fetch patches we do not already have locally.
            if [ ! -f ${LFS_PATCHES_DIR}/${PATCH_NAME} ]; then
                wget_wrapper ${PATCHES_URL} ${PATCH_NAME} ${LFS_PATCHES_DIR}
            fi
        done
    fi
}

# Get patch package if it is not in the repository
# Arg. #1: Package name and version
static_getpatch() {
    # NOTE(review): PACK is intentionally not 'local' in the original.
    PACK=${1}

    # Checking for correct number of arguments
    if [ $# -ne 1 ]; then
        echo "${FUNCNAME}(), wrong number of arguments: ${*}"
        return 1
    fi

    # Checking if patch is available from LFS.
    static_checkpatch ${PACK} ${LFS_PATCHES_LIST} ${LFS_PATCHES_URL}

    # Checking if patch is available from BLFS.
    static_checkpatch ${PACK} ${BLFS_PATCHES_LIST} ${BLFS_PATCHES_URL}

    # Checking if patch is available from CLFS.
    static_checkpatch ${PACK} ${CLFS_PATCHES_LIST} ${CLFS_PATCHES_URL}
}

# Scan the last wget log for "file not found" style server responses.
# Return: 0 if the log shows the remote file was missing, 1 otherwise.
detect_file_not_found() {
    # HTTP: will return "ERROR 404: Not Found"
    # FTP:  will say "File not found" or "No such file"
    if grep -q --ignore-case -e "not found" -e "no such file" ${WGET_LOG_FILE}; \
        then
        #echo "404 NOTFOUND"
        return 0
    fi

    return 1
}

# Get package if it is not in the repository
# Arg. #1: Package name and version
# Arg. #2: URL
# Arg. #3: Optional extension
# Side effects: sets FINAL_EXT to the extension actually downloaded
# (or "tar.bz2" after conversion); used by fpkg().
static_getpkg() {
    local PACK=${1}
    local URL=${2}
    local PREFERRED_EXT=${3}

    # Checking for correct number of arguments
    if [ $# -ne 2 -a $# -ne 3 ]; then
        echo "${FUNCNAME}(), wrong number of arguments: ${*}"
        return 1
    fi

    if [ -n "${PREFERRED_EXT}" ]; then
        # Put the specified extension first in the list of extensions to try.
        MODLIST=$(echo ${DEFAULT_ARCH_EXT_LIST} | \
            sed "s@${PREFERRED_EXT}@@")
        PREFERRED_EXT="${PREFERRED_EXT} ${MODLIST}"
    else
        # List of default archive extensions to try
        PREFERRED_EXT="${DEFAULT_ARCH_EXT_LIST}"
    fi

    wget_wrapper_status=

    # Try the given URL first, then fall back to the hugovil mirror.
    url_list="${URL}"
    if [ x"${URL}" != x"${HV_PACKAGES_URL}" ]; then
        url_list+=" ${HV_PACKAGES_URL}"
    fi

    for url in ${url_list}; do
        for arch_ext in ${PREFERRED_EXT}; do
            wget_wrapper ${url} ${PACK}.${arch_ext}
            wget_wrapper_status=$?
            #if [ ${wget_wrapper_status} -eq 1 ]; then
            #    return 1;
            #fi
            if [ ${wget_wrapper_status} -eq 0 ]; then
                break;
            fi
        done
        if [ ${wget_wrapper_status} -eq 0 ]; then
            break;
        fi
    done

    # Failure or file not found
    if [ ${wget_wrapper_status} -gt 0 ]; then
        return 1;
    fi

    # If we are here, it means the file was successfully downloaded.
    # (arch_ext still holds the extension that succeeded.)
    FINAL_EXT=${arch_ext}

    # Convert to bzip2 format if requested and if it is a tar/gz archive
    if [ -n "${PREFER_BZIP2_ARCHIVE}" ]; then
        if is_extension_tar_gzip "${arch_ext}" ; then
            rcmd "Converting ${PACK}.${arch_ext} to bzip2 format" \
                gztobz2 ${LFS_PKG_DIR}/${PACK}.${arch_ext}
            FINAL_EXT="tar.bz2"
        fi
    fi

    return 0
}

# Test integrity of archive
# Arg. #1: package name
# Arg. #2: filename
# Arg. #3: extension
#
# Test the integrity of a compressed archive using the matching
# decompressor's built-in test mode (bzip2 -t, gunzip -t, unxz -t, unzip -t).
# Arg. #1: package name (used only in the progress message)
# Arg. #2: filename (full path to the archive)
# Arg. #3: extension (e.g. tar.bz2, tar.gz, tgz, tar.xz, zip)
# Return: 0 when the file is absent/a symlink/of an untestable type;
#         otherwise the status of the test command (via rcmd).
test_archive_integrity() {
    if [ $# -ne 3 ]; then
        echo "${FUNCNAME}(), wrong number of arguments: ${*}"
        return 1
    fi

    local PACK=${1}
    local FILE=${2}
    local EXT=${3}

    # The file may be absent, or a symbolic link. Abort without error.
    # (Quoted tests and separate [ ]'s replace the deprecated, unquoted
    # "[ ! -f ${FILE} -o -h ${FILE} ]", which broke on paths with spaces.)
    if [ ! -f "${FILE}" ] || [ -h "${FILE}" ]; then
        return 0
    fi

    local TESTCMD=""

    if [ "x${EXT}" = "xtar.bz2" ]; then
        TESTCMD="bzip2 -t"
    elif is_extension_tar_gzip "${EXT}"; then
        TESTCMD="gunzip -t"
    elif [ "x${EXT}" = "xtar.xz" ]; then
        TESTCMD="unxz -t"
    elif [ "x${EXT}" = "xzip" ]; then
        TESTCMD="unzip -t"
    else
        # Can only test gzip, bzip2, xz and zip archives.
        return 0
    fi

    # TESTCMD is deliberately unquoted: it must word-split into
    # "command -t"; the filename itself is quoted.
    rcmd "Testing ${PACK}.${EXT}" ${TESTCMD} "${FILE}"
}

# Get package if it is not in the repository
# Arg. #1: Package name and version
# Arg. #2: URL
# Options:
#   -d      Fetch directory (default is LFS_PKG_DIR)
#   -e      File extension (default is tar.bz2)
#   -f      Filename on server (default is PACKAGE)
#   -m      Mode (for common package types):
#             gnu
#             gnome
#             sf (sourceforge)
#             xorg
#             apa (apache)
#             hv (hugovil.com)
#             lfs
#             pm (Perl module via CPAN)
#             fd (freedesktop.org)
#   -o      Option specific to mode
#   -s DIR  Subdirectory on server
#   -v NUM  Subdirectory on server is equal to package version.
#           If -s option is specified, then append version number to value
#           of -s argument:
#             NUM=  Full version number
#             NUM=1 First number of package version
#             NUM=2 First two numbers of package version
#           Ex: fpkg -s "v" -v 2 mypkg-1.2.3 http://mypkg.com would fetch from url:
#               http://mypkg.com/v1.2/mypkg-1.2.3.tar.bz2
#   -w      First erase destination file if it exists (except in test mode)
fpkg() {
    local ORIG_ARGS=${*}
    local DEST_DIR=""
    local FILE_EXT=""
    local SRC_FILENAME=""
    local MODE=""
    local MODE_OPT=""
    local SRC_DIR=""
    local FD_SUBDIR_FINAL=""
    local VARG=""

    unset VOPTION
    unset ERASE_FIRST

    while getopts "d:e:f:m:o:s:v:w" flag ;do
        case ${flag} in
            d)
                # Fetch directory (where to put file)
                DEST_DIR=${OPTARG}
                ;;
            e)
                # File extension
                FILE_EXT=${OPTARG}
                ;;
            f)
                # Filename if different than package name
                SRC_FILENAME=${OPTARG}
                ;;
            m)
                MODE=${OPTARG}
                ;;
            o)
                MODE_OPT=${OPTARG}
                ;;
            s)
                SRC_DIR=${OPTARG}
                ;;
            v)
                VOPTION="y"
                VARG=${OPTARG}
                ;;
            w)
                ERASE_FIRST="y"
                ;;
            ?)
                echo "${FUNCNAME}(): Invalid option: ${OPTARG}."
                return 1
                ;;
        esac
    done
    shift `expr "${OPTIND}" - 1`
    unset OPTSTRING
    unset OPTIND
    unset OPTARG

    # Checking for correct number of arguments
    if [ $# -lt 1 -o $# -gt 2 ]; then
        echo "${FUNCNAME}(), wrong number of arguments: ${ORIG_ARGS}"
        return 1
    fi

    local PACK=${1}

    if [ $# -eq 2 ]; then
        local URL=${2}
    fi

    if [ -n "${VOPTION}" ]; then
        # Append version number to SRC_DIR (if specified).
        # get_pkg_ver* helpers are defined elsewhere; presumably they extract
        # the full / 1-part / 2-part / 3-part version from PACK — TODO confirm.
        case ${VARG} in
            0)
                # Full version number
                SRC_DIR+="$(get_pkg_ver ${PACK})"
                ;;
            1)
                SRC_DIR+="$(get_pkg_ver1 ${PACK})"
                ;;
            2)
                SRC_DIR+="$(get_pkg_ver2 ${PACK})"
                ;;
            3)
                SRC_DIR+="$(get_pkg_ver3 ${PACK})"
                ;;
            *)
                echo "${FUNCNAME}(), invalid -v argument: ${VARG}"
                return 1
                ;;
        esac
    fi

    # Mode presets: each mode fixes the base URL and fills in default
    # subdirectory / extension when not given explicitly.
    if [ -n "${MODE}" ]; then
        case ${MODE} in
            gnu)
                URL=${GNU_URL}
                if [ -z "${SRC_DIR}" ]; then
                    # Default subdirectory on server
                    SRC_DIR=$(get_pkg_name ${PACK})
                fi
                if [ -z "${FILE_EXT}" ]; then
                    # Default file extension is tar.xz
                    FILE_EXT="tar.xz"
                fi
                ;;
            gnome)
                URL=${GNOME_URL}
                if [ -z "${SRC_DIR}" ]; then
                    # Default subdirectory on server
                    SRC_DIR=$(get_pkg_name ${PACK})
                fi
                # GNOME layout adds a major.minor version subdirectory.
                SRC_DIR="${SRC_DIR}/$(get_pkg_ver2 ${PACK})"
                if [ -z "${FILE_EXT}" ]; then
                    # Default file extension is xz
                    FILE_EXT="tar.xz"
                fi
                ;;
            sf)
                URL=${SOURCEFORGE_URL}
                if [ -z "${SRC_DIR}" ]; then
                    # Default subdirectory on server
                    SRC_DIR=$(get_pkg_name ${PACK})
                fi
                if [ -z "${FILE_EXT}" ]; then
                    # Default file extension is tar.gz
                    FILE_EXT="tar.gz"
                fi
                ;;
            xorg)
                URL=${XORG_URL}
                if [ -z "${SRC_DIR}" ]; then
                    echo "${FUNCNAME}(), mode 'xorg' needs '-s' option"
                    return 1
                fi
                ;;
            fd)
                # Most common layout:
                #   http://pkg-config.freedesktop.org/releases/
                if [ -z "${MODE_OPT}" ]; then
                    # Default subdirectory or module on server
                    MODE_OPT=$(get_pkg_name ${PACK})
                fi
                if [ -z "${FILE_EXT}" ]; then
                    # Default file extension is always tar.gz???
                    FILE_EXT="tar.gz"
                fi
                FD_SUBDIR_FINAL="releases"
                if [ -n "${SRC_DIR}" ]; then
                    # Optional final subdir:
                    #   releases (default)
                    #   dist
                    #   none (don't append anything)
                    if [ "x${SRC_DIR}" = "xnone" ]; then
                        FD_SUBDIR_FINAL=""
                    else
                        FD_SUBDIR_FINAL=${SRC_DIR}
                    fi
                fi
                URL="http://${MODE_OPT}.freedesktop.org/${FD_SUBDIR_FINAL}"
                SRC_DIR=""
                ;;
            apa)
                URL=${APACHE_URL}
                if [ -z "${SRC_DIR}" ]; then
                    # Default subdirectory on server
                    SRC_DIR=$(get_pkg_name ${PACK})
                fi
                ;;
            hv)
                URL=${HV_PACKAGES_URL}
                ;;
            pm)
                URL=${CPAN_AUTHORS_URL}
                if [ -z "${SRC_DIR}" ]; then
                    # Default subdirectory on server
                    SRC_DIR=$(get_pkg_name ${PACK})
                fi
                if [ -z "${FILE_EXT}" ]; then
                    # Default file extension is tar.gz
                    FILE_EXT="tar.gz"
                fi
                ;;
            lfs)
                URL=${LFS_PACKAGES_URL}
                ;;
            *)
                echo "${FUNCNAME}(), unsupported mode ${MODE}"
                return 1
                ;;
        esac
    fi

    if [ -z "${SRC_FILENAME}" ]; then
        # Default source filename = name of package
        SRC_FILENAME=${PACK}
    fi

    # Set FINAL_EXT to the final extension of the downloaded file
    if [ -z "${FILE_EXT}" ]; then
        # Default format is tar/bzip2 archive
        FINAL_EXT=tar.bz2
    else
        if [ -n "${PREFER_BZIP2_ARCHIVE}" ]; then
            # Force conversion to tar/bzip2 archive
            FINAL_EXT=tar.bz2
        else
            # Do not convert
            FINAL_EXT=${FILE_EXT}
        fi
    fi

    if [ -z "${DEST_DIR}" ]; then
        # Default destination directory
        DEST_DIR=${LFS_PKG_DIR}
    fi

    mkdir -v -p ${DEST_DIR} >> ${LFS_LOG_FILE} 2>&1

    if [ -z "${SRC_DIR}" ]; then
        # Default source subdirectory on server
        SRC_DIR=""
    else
        URL=${URL}/${SRC_DIR}
    fi

    # Search for any valid archive types...
    # If the package already exists locally under some archive extension,
    # prefer that extension as the final one.
    for arch_ext in ${DEFAULT_ARCH_EXT_LIST}; do
        if [ -f ${DEST_DIR}/${PACK}.${arch_ext} ]; then
            FINAL_EXT=${arch_ext}
        fi
    done

    DEST_FILE=${DEST_DIR}/${PACK}.${FINAL_EXT}

    if [ -z "${TEST_INTEGRITY}" ]; then
        if [ -f ${DEST_FILE} -a -n "${ERASE_FIRST}" ]; then
            rm ${DEST_FILE}
        fi

        if [ ! -f ${DEST_FILE} ]; then
            # Fetch package
            set +e
            static_getpkg ${SRC_FILENAME} ${URL} ${FILE_EXT}
            rc=$?
            if [ ${rc} -ne 0 ]; then return ${rc}; fi
            set -e

            # Move file if source filename is not equal to package name and/or
            # destination directory is not the default:
            # (FINAL_EXT may have been updated by static_getpkg.)
            DOWNLOADED_FILE=${LFS_PKG_DIR}/${SRC_FILENAME}.${FINAL_EXT}
            DEST_FILE=${DEST_DIR}/${PACK}.${FINAL_EXT}
            if [ "${DEST_FILE}" != "${DOWNLOADED_FILE}" ]; then
                mv ${DOWNLOADED_FILE} ${DEST_FILE}
            fi
        fi
    else
        set +e
        test_archive_integrity "${PACK}" "${DEST_FILE}" "${FINAL_EXT}"
        set -e

        # We do not want to fetch patches when testing integrity,
        # so return immediately
        return 0
    fi

    # Check for available patches with PACKAGE (TARGET) name.
    static_getpatch ${PACK}

    if [ ${SRC_FILENAME} != ${PACK} ]; then
        # Check for available patches with SRC_FILENAME name.
        static_getpatch ${SRC_FILENAME}

        # Rename any patch fetched (in fpkg call) and replace SOURCE by TARGET
        # in patch name.
        # NOTE(review): 'rename' here looks like the util-linux variant
        # (rename FROM TO files...) — confirm, as the Perl rename differs.
        local PATCHES_LIST="${LFS_PATCHES_DIR}/${SRC_FILENAME}-*.patch"
        if ls ${PATCHES_LIST} 1> /dev/null 2>&1; then
            echo "CMD=${SRC_FILENAME} ${PACK} ${PATCHES_LIST}"
            rename ${SRC_FILENAME} ${PACK} ${PATCHES_LIST}
        fi
    fi
}