buildgcc: try curl if wget is not present
There are systems that come with curl but not wget (e.g. macOS); with this change they have one less dependency to install. Also fix some cosmetic issues in console output and require valid certificates on https downloads.

Change-Id: Idc2ce892fbb6629aebfe1ae2a95dcef4d5d93aca
Signed-off-by: Patrick Georgi <pgeorgi@chromium.org>
Reviewed-on: https://review.coreboot.org/18048
Tested-by: build bot (Jenkins)
Reviewed-by: Stefan Reinauer <stefan.reinauer@coreboot.org>
Reviewed-by: Nico Huber <nico.h@gmx.de>
commit df1ff231e4 (parent 3d0288d676)
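The heart of the change is the hunk at line ~881 below: download_showing_percentage() gets a wget body when wget is found, and a curl body otherwise. A minimal standalone sketch of the same fallback idiom follows; it probes with plain command -v instead of buildgcc's searchtool helper, so treat the detection shown here as an illustration, not the script's actual logic.

    # Pick whichever downloader exists and expose a single function name.
    # Detection via command -v is an assumption for this sketch; buildgcc
    # itself detects tools through its searchtool helper.
    if command -v wget >/dev/null 2>&1; then
        download_showing_percentage() {
            url=$1
            wget "$url"           # wget prints its own progress output
        }
    elif command -v curl >/dev/null 2>&1; then
        download_showing_percentage() {
            url=$1
            curl -#OL "$url"      # -#: progress bar, -O: keep remote name, -L: follow redirects
        }
    else
        echo "Please install wget or curl." >&2
        exit 1
    fi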
@@ -130,9 +130,9 @@ please_install()
 	*) solution="using your OS packaging system" ;;
 	esac
 
-	printf "${RED}ERROR:${red} Missing tool: Please install \'$1\'. (eg $solution)${NC}\n" >&2
+	printf "${RED}ERROR:${red} Missing tool: Please install '$1'. (eg $solution)${NC}\n" >&2
 	if [ -n "$2" ]; then
-		printf "${RED}ERROR:${red} or install \'$2\'.${NC}\n" >&2
+		printf "${RED}ERROR:${red} or install '$2'.${NC}\n" >&2
 	fi
 }
 
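The hunk above is purely cosmetic: inside a double-quoted string, \' reaches printf as a literal backslash plus quote, and common shells print the unknown escape verbatim, so the error message showed stray backslashes. A quick check (behavior assumed for bash-like printf implementations):

    printf "Please install \'gcc\'.\n"   # typically prints: Please install \'gcc\'.
    printf "Please install 'gcc'.\n"     # prints: Please install 'gcc'.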
@@ -282,15 +282,6 @@ compute_sum() {
 	printf "(checksum created. ${RED}Note. Please upload sum/$1.cksum if the corresponding archive is upgraded.)${NC}"
 }
 
-download_showing_percentage() {
-	url=$1
-	printf " ..${red} 0%%"
-	wget --no-check-certificate $url 2>&1 | while read line; do
-		echo $line | grep -o "[0-9]\+%" | awk '{printf("\b\b\b\b%4s", $1)}'
-	done
-	printf "${NC}"
-}
-
 download() {
 	package=$1
 	archive="$(eval echo \$$package"_ARCHIVE")"
@@ -299,7 +290,7 @@ download() {
 	printf " * $FILE "
 
 	if test -f tarballs/$FILE && check_sum $FILE ; then
-		printf "(cached)"
+		echo "(cached)"
 	else
 		printf "(downloading from $archive)"
 		rm -f tarballs/$FILE
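The printf-to-echo switch above matters only for the trailing newline: printf "(cached)" leaves the cursor on the same line, while echo appends the newline itself, which is what lets the next hunk drop download()'s final printf "\n".

    printf "(cached)"   # no newline; subsequent output continues on the same line
    echo "(cached)"     # newline included, so no separate printf "\n" is needed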
@@ -310,10 +301,9 @@ download() {
 	fi
 
 	if [ ! -f tarballs/$FILE ]; then
-		printf "\n${RED}Failed to download $FILE.${NC}\n"
+		printf "${RED}Failed to download $FILE.${NC}\n"
 		exit 1
 	fi
-	printf "\n"
 }
 
 unpack_and_patch() {
@@ -891,9 +881,25 @@ searchtool flex flex > /dev/null
 searchtool g++ "Free Software Foundation" nofail > /dev/null || \
 	searchtool clang "clang version" nofail > /dev/null || \
 	searchtool clang "LLVM" "" "g++" > /dev/null
-searchtool wget > /dev/null
 searchtool bzip2 "bzip2," > /dev/null
 
+if searchtool wget "GNU" nofail > /dev/null; then
+download_showing_percentage() {
+	url=$1
+	printf " ..${red} 0%%"
+	wget $url 2>&1 | while read line; do
+		echo $line | grep -o "[0-9]\+%" | awk '{printf("\b\b\b\b%4s", $1)}'
+	done
+	echo "${NC}"
+}
+elif searchtool curl "^curl " > /dev/null; then
+download_showing_percentage() {
+	url=$1
+	echo
+	curl -#OL $url
+}
+fi
+
 check_for_library "-lz" "zlib (zlib1g-dev or zlib-devel)"
 
 CC=cc
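For reference on the new curl path: -# draws curl's progress bar instead of the default statistics table, -O saves the file under its remote name, and -L follows redirects, which mirror URLs commonly need. The call site is not part of this diff; judging from the url=$1 parameter, a hypothetical caller inside download() would look roughly like:

    # Hypothetical call site (not shown in this diff): fetch into tarballs/
    cd tarballs && download_showing_percentage "$archive" && cd ..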