#!/usr/bin/env bash
set -Eeuo pipefail

# Patch metadata preserved from the original diff header:
#   From e7b39e6ffc3015e24d83243e819d8dddcc72112e Mon Sep 17 00:00:00 2001
#   From: Kroese  Date: Thu, 16 May 2024 20:07:44 +0200
#   Subject: [PATCH] feat: Refactor download code (src/mido.sh)

# Print a human-readable diagnostic for a curl exit status.
#
# Arguments:
#   $1 - curl's exit status
# Outputs:
#   one diagnostic line on stdout
# Returns:
#   2 for fatal, unrecoverable situations (caller should abort)
#   1 for transient/miscellaneous errors (caller may report and continue)
handle_curl_error() {
    local error_code="$1"
    # Return status used for errors the caller must treat as fatal.
    local fatal_error_action=2

    case "$error_code" in
        6)
            echo "Failed to resolve Microsoft servers! Is there an Internet connection? Exiting..."
            return "$fatal_error_action"
            ;;
        7)
            echo "Failed to contact Microsoft servers! Is there an Internet connection or is the server down?"
            ;;
        8)
            echo "Microsoft servers returned a malformed HTTP response!"
            ;;
        22)
            echo "Microsoft servers returned a failing HTTP status code!"
            ;;
        23)
            echo "Failed at writing Windows media to disk! Out of disk space or permission error? Exiting..."
            return "$fatal_error_action"
            ;;
        26)
            echo "Ran out of memory during download! Exiting..."
            return "$fatal_error_action"
            ;;
        36)
            echo "Failed to continue earlier download!"
            ;;
        63)
            echo "Microsoft servers returned an unexpectedly large response!"
            ;;
        126 | 127)
            echo "Curl command not found! Please install curl and try again. Exiting..."
            return "$fatal_error_action"
            ;;
        *)
            # POSIX defines exit statuses 1-125 as usable by the program itself:
            # https://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html#tag_18_08_02
            # BUGFIX: the original used the case pattern `$((error_code <= 125)))`,
            # which arithmetic-expands to the literal pattern `1` (or `0`) and so
            # only matched exit status 1; every other unlisted status in 2-125
            # fell through to the signal-decoding branch below. Test the range
            # explicitly instead. (2>/dev/null guards a non-numeric argument.)
            if [ "$error_code" -le 125 ] 2>/dev/null; then
                # Must be some other server or network error (possibly with this
                # specific request/file), assuming a correctly formed curl
                # command, an HTTP(S) request and a sane curl build.
                echo "Miscellaneous server or network error!"
            else
                # Exit statuses are undefined by POSIX beyond this point;
                # statuses above 128 conventionally mean death by signal 128+N.
                case "$(kill -l "$error_code" 2>/dev/null)" in
                    # Signals defined to exist by POSIX:
                    # https://pubs.opengroup.org/onlinepubs/009695399/basedefs/signal.h.html
                    INT)
                        echo "Curl was interrupted!"
                        ;;
                    # There could be other signals but these are the most common.
                    SEGV | ABRT)
                        echo "Curl crashed! Failed exploitation attempt? Please report any core dumps to curl developers. Exiting..."
                        return "$fatal_error_action"
                        ;;
                    *)
                        echo "Curl terminated due to a fatal signal!"
                        ;;
                esac
            fi
            ;;
    esac
    return 1
}
# Download an enterprise-evaluation Windows Server / LTSC ISO.
#
# Copyright (C) 2024 Elliot Killick
# This function is adapted from the Mido project:
# https://github.com/ElliotKillick/Mido
#
# Arguments:
#   $1 - evalcenter download-page slug (e.g. "windows-server-2022")
#   $2 - edition type: "enterprise", "ltsc", or anything else for the default link
# Globals read:  RELEASE, OS, I18N, VM_PATH
# Globals set:   FILE_NAME, OS (intentionally non-local — read by the caller)
# Returns: non-zero on parse/network failure (via handle_curl_error)
download_windows_server() {
    local windows_version="$1"
    local enterprise_type="$2"
    # FIX: declared local — the original leaked these into the global scope.
    local iso_download_page_html=""
    local iso_download_links=""
    local iso_download_link=""
    local PRETTY_RELEASE=""

    case "${RELEASE}" in
        "10-ltsc") PRETTY_RELEASE="10 LTSC";;
        "2012-r2") PRETTY_RELEASE="2012 R2";;
        *)         PRETTY_RELEASE="${RELEASE}";;
    esac

    echo "Downloading $(pretty_name "${OS}") ${PRETTY_RELEASE} (${I18N})"

    local url="https://www.microsoft.com/en-us/evalcenter/download-$windows_version"

    echo " - Parsing download page: ${url}"
    iso_download_page_html="$(curl --silent --location --max-filesize 1M --fail --proto =https --tlsv1.2 --http1.1 -- "$url")" || {
        handle_curl_error $?
        return $?
    }

    if ! [ "$iso_download_page_html" ]; then
        # This should only happen if there's been some change to where this download page is located
        echo " - Windows server download page gave us an empty response"
        return 1
    fi

    # Map the localized language name in $I18N to the culture/country pair
    # embedded in the download links; default to US English.
    local CULTURE=""
    local COUNTRY=""
    case "${I18N}" in
        "English (Great Britain)")
            CULTURE="en-gb"
            COUNTRY="GB";;
        "Chinese (Simplified)")
            CULTURE="zh-cn"
            COUNTRY="CN";;
        "Chinese (Traditional)")
            CULTURE="zh-tw"
            COUNTRY="TW";;
        "French")
            CULTURE="fr-fr"
            COUNTRY="FR";;
        "German")
            CULTURE="de-de"
            COUNTRY="DE";;
        "Italian")
            CULTURE="it-it"
            COUNTRY="IT";;
        "Japanese")
            CULTURE="ja-jp"
            COUNTRY="JP";;
        "Korean")
            CULTURE="ko-kr"
            COUNTRY="KR";;
        "Portuguese (Brazil)")
            CULTURE="pt-br"
            COUNTRY="BR";;
        "Spanish")
            CULTURE="es-es"
            COUNTRY="ES";;
        "Russian")
            CULTURE="ru-ru"
            COUNTRY="RU";;
        *)
            CULTURE="en-us"
            COUNTRY="US";;
    esac

    echo " - Getting download link.."
    iso_download_links="$(echo "$iso_download_page_html" | grep -o "https://go.microsoft.com/fwlink/p/?LinkID=[0-9]\+&clcid=0x[0-9a-z]\+&culture=${CULTURE}&country=${COUNTRY}")" || {
        # This should only happen if there's been some change to the download endpoint web address
        echo " - Windows server download page gave us no download link"
        return 1
    }

    # Limit untrusted size for input validation
    iso_download_links="$(echo "$iso_download_links" | head -c 1024)"

    case "$enterprise_type" in
        # Select x64 download link
        "enterprise") iso_download_link=$(echo "$iso_download_links" | head -n 2 | tail -n 1) ;;
        # Select x64 LTSC download link
        "ltsc") iso_download_link=$(echo "$iso_download_links" | head -n 4 | tail -n 1) ;;
        *) iso_download_link="$iso_download_links" ;;
    esac

    # Follow the redirect so the proceeding log message is useful.
    # This is a request we make that Fido doesn't.
    # No "--max-filesize" needed: HEAD request, output discarded anyway.
    # FIX: the original passed --silent twice; the duplicate is removed.
    iso_download_link="$(curl --silent --location --output /dev/null --write-out "%{url_effective}" --head --fail --proto =https --tlsv1.2 --http1.1 -- "$iso_download_link")" || {
        # This should only happen if the Microsoft servers are down
        handle_curl_error $?
        return $?
    }

    # Limit untrusted size for input validation
    iso_download_link="$(echo "$iso_download_link" | head -c 1024)"

    echo " - URL: $iso_download_link"

    # Download ISO
    FILE_NAME="${iso_download_link##*/}"
    web_get "${iso_download_link}" "${VM_PATH}" "${FILE_NAME}"
    OS="windows-server"
}
Fido manually maintains a list of all the Windows release/edition product edition IDs in its script (see: $WindowsVersions array). This is helpful for downloading older releases (e.g. Windows 10 1909, 21H1, etc.) but we always want to get the newest release which is why we get this value dynamically + # Also, keeping a "$WindowsVersions" array like Fido does would be way too much of a maintenance burden + # Remove "Accept" header that curl sends by default + echo " - Parsing download page: ${url}" + iso_download_page_html="$(curl --silent --user-agent "$user_agent" --header "Accept:" --max-filesize 1M --fail --proto =https --tlsv1.2 --http1.1 -- "$url")" || { + handle_curl_error $? + return $? + } + + echo -n " - Getting Product edition ID: " + # tr: Filter for only numerics to prevent HTTP parameter injection + # head -c was recently added to POSIX: https://austingroupbugs.net/view.php?id=407 + product_edition_id="$(echo "$iso_download_page_html" | grep -Eo '