[cmake] refactor: cpmfile, deps prefetch, force system and more #322
5 changed files with 34 additions and 35 deletions

@@ -144,6 +144,7 @@ function(AddJsonPackage)
     get_json_element("${object}" hash hash "")
     get_json_element("${object}" sha sha "")
     get_json_element("${object}" url url "")
+    get_json_element("${object}" key key "")
     get_json_element("${object}" tag tag "")
     get_json_element("${object}" artifact artifact "")
     get_json_element("${object}" git_version git_version "")
@@ -194,6 +195,7 @@ function(AddJsonPackage)
         HASH "${hash}"
         SHA "${sha}"
         REPO "${repo}"
+        KEY "${key}"
         PATCHES "${patches}"
         OPTIONS "${options}"
         FIND_PACKAGE_ARGUMENTS "${find_args}"
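The two hunks above add an optional `key` element to each cpmfile entry: AddJsonPackage now reads it from the JSON object and forwards it as KEY to the underlying package call. Judging by the prefetch script below, the same key also selects the per-package cache subdirectory. A minimal sketch of that layout, with a hypothetical package name and key and an assumed default cache location:

    # Sketch only: mirrors OUTDIR="${CPM_SOURCE_CACHE}/${LOWER_PACKAGE}/${KEY}"
    # from the prefetch script; package name, key and cache default are made up.
    CPM_SOURCE_CACHE="${CPM_SOURCE_CACHE:-$HOME/.cache/cpm}"
    PACKAGE_NAME="Example"        # hypothetical CPM package name
    KEY="windows-amd64"           # hypothetical key, e.g. a CI platform
    LOWER_PACKAGE=$(echo "$PACKAGE_NAME" | tr '[:upper:]' '[:lower:]')   # assumed lowercasing
    echo "would cache into: ${CPM_SOURCE_CACHE}/${LOWER_PACKAGE}/${KEY}"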

externals/nx_tzdb/CMakeLists.txt (vendored): 18 changes

@@ -32,28 +32,14 @@ if (CMAKE_SYSTEM_NAME STREQUAL "Windows" OR ANDROID)
 endif()

 set(NX_TZDB_VERSION "250725")
-set(NX_TZDB_ARCHIVE "${CPM_SOURCE_CACHE}/nx_tzdb/${NX_TZDB_VERSION}.zip")
 set(NX_TZDB_ROMFS_DIR "${CPM_SOURCE_CACHE}/nx_tzdb/tz")

 if ((NOT CAN_BUILD_NX_TZDB OR YUZU_DOWNLOAD_TIME_ZONE_DATA) AND NOT EXISTS ${NX_TZDB_ROMFS_DIR})
-    set(NX_TZDB_DOWNLOAD_URL "https://github.com/crueter/tzdb_to_nx/releases/download/${NX_TZDB_VERSION}/${NX_TZDB_VERSION}.zip")
-    message(STATUS "Downloading time zone data from ${NX_TZDB_DOWNLOAD_URL}...")
-    file(DOWNLOAD ${NX_TZDB_DOWNLOAD_URL} ${NX_TZDB_ARCHIVE}
-        STATUS NX_TZDB_DOWNLOAD_STATUS)
-    list(GET NX_TZDB_DOWNLOAD_STATUS 0 NX_TZDB_DOWNLOAD_STATUS_CODE)
-    if (NOT NX_TZDB_DOWNLOAD_STATUS_CODE EQUAL 0)
-        message(FATAL_ERROR "Time zone data download failed (status code ${NX_TZDB_DOWNLOAD_STATUS_CODE})")
-    endif()
+    message(STATUS "Downloading time zone data...")

     AddJsonPackage(tzdb)

-    file(ARCHIVE_EXTRACT
-        INPUT
-            ${NX_TZDB_ARCHIVE}
-        DESTINATION
-            ${NX_TZDB_ROMFS_DIR})
+    set(NX_TZDB_ROMFS_DIR ${nx_tzdb_SOURCE_DIR})
 elseif (CAN_BUILD_NX_TZDB AND NOT YUZU_DOWNLOAD_TIME_ZONE_DATA)
     # TODO(crueter): this sucked to do with cpm, see if i can get it to work again
     message(FATAL_ERROR "Building tzdb is currently unsupported. Check back later.")
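With this change the CMake side no longer downloads or extracts the archive itself: the cpmfile-driven AddJsonPackage(tzdb) call handles fetching, and NX_TZDB_ROMFS_DIR is pointed at the resulting nx_tzdb_SOURCE_DIR. If the archive should land in the cache ahead of configure time (for example in CI), the prefetch script can be pointed at the same entry. A hedged usage sketch, assuming the script path and option name shown elsewhere in this PR and a made-up cache location:

    # Sketch: prefetch the tzdb archive into the CPM cache, then configure.
    # "tzdb" is the entry name from externals/nx_tzdb/cpmfile.json.
    export CPM_SOURCE_CACHE="$HOME/.cache/cpm"
    tools/cpm-fetch.sh tzdb
    cmake -S . -B build -DYUZU_DOWNLOAD_TIME_ZONE_DATA=ON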

externals/nx_tzdb/cpmfile.json (vendored): 1 change

@@ -1,5 +1,6 @@
 {
   "tzdb": {
+    "package": "nx_tzdb",
     "url": "https://github.com/crueter/tzdb_to_nx/releases/download/250725/250725.zip",
     "hash": "8f60b4b29f285e39c0443f3d5572a73780f3dbfcfd5b35004451fadad77f3a215b2e2aa8d0fffe7e348e2a7b0660882b35228b6178dda8804a14ce44509fd2ca",
     "version": "250725"
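The new "package" field decouples the cpmfile key (tzdb) from the CPM package name (nx_tzdb) that the build and the prefetch script use. A sketch of how the fetch loop appears to resolve it, using the same find/jq pipeline the script uses and falling back to the entry name when the field is absent:

    # Sketch: locate the "tzdb" entry across the cpmfile.json files and read
    # its package name, defaulting to the entry key if none is set.
    package=tzdb
    JSON=$(find . externals src/yuzu/externals externals/ffmpeg src/dynarmic/externals externals/nx_tzdb \
        -maxdepth 1 -name cpmfile.json \
        -exec jq -r ".\"$package\" | select( . != null )" {} \;)

    PACKAGE_NAME=$(jq -r '.package' <<< "$JSON")   # "nx_tzdb" after this change
    [ "$PACKAGE_NAME" = null ] && PACKAGE_NAME="$package"
    echo "$package resolves to CPM package $PACKAGE_NAME"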
@@ -6,5 +6,5 @@
 # SPDX-FileCopyrightText: 2025 crueter
 # SPDX-License-Identifier: GPL-3.0-or-later

-LIBS=$(find . externals src/yuzu/externals externals/ffmpeg src/dynarmic/externals -maxdepth 1 -name cpmfile.json -exec jq -j 'keys_unsorted | join(" ")' {} \; -printf " ")
+LIBS=$(find . externals externals/nx_tzdb src/yuzu/externals externals/ffmpeg src/dynarmic/externals -maxdepth 1 -name cpmfile.json -exec jq -j 'keys_unsorted | join(" ")' {} \; -printf " ")
 tools/cpm-fetch.sh $LIBS
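This is the CI prefetch driver: it collects every key from every cpmfile.json it can find (now including externals/nx_tzdb) and passes the whole list to tools/cpm-fetch.sh. To see what would be fetched without downloading anything, the same pipeline can be run on its own; a small sketch:

    # Sketch: print one cpmfile key per line instead of a space-joined list.
    find . externals externals/nx_tzdb src/yuzu/externals externals/ffmpeg src/dynarmic/externals \
        -maxdepth 1 -name cpmfile.json -exec jq -r 'keys_unsorted[]' {} \;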
@@ -23,22 +23,24 @@ download_package() {
     OUTDIR="${CPM_SOURCE_CACHE}/${LOWER_PACKAGE}/${KEY}"
     [ -d "$OUTDIR" ] && return

-    curl "$DOWNLOAD" -s -L -o "$OUTFILE"
-    echo $OUTFILE
+    curl "$DOWNLOAD" -sS -L -o "$OUTFILE"
+
+    ACTUAL_HASH=$(${HASH_ALGO}sum "$OUTFILE" | cut -d" " -f1)
+    [ "$ACTUAL_HASH" != "$HASH" ] && echo "$FILENAME did not match expected hash; expected $HASH but got $ACTUAL_HASH" && exit 1

     mkdir -p "$OUTDIR"

-    pushd "$OUTDIR"
+    pushd "$OUTDIR" > /dev/null

     case "$FILENAME" in
         (*.7z)
-            7z x "$OUTFILE"
+            7z x "$OUTFILE" > /dev/null
             ;;
         (*.tar*)
-            tar xf "$OUTFILE"
+            tar xf "$OUTFILE" > /dev/null
             ;;
         (*.zip)
-            unzip "$OUTFILE"
+            unzip "$OUTFILE" > /dev/null
             ;;
     esac
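This hunk closes the "Hash verification" TODO removed further down: after curl succeeds (-sS keeps errors visible while staying quiet otherwise), the archive's digest is recomputed with the configured algorithm and compared against the expected value before anything is extracted. The check in isolation, with hypothetical inputs standing in for the values the script reads from cpmfile.json or the release sidecar:

    # Sketch of the verification step on its own; HASH_ALGO and HASH are
    # stand-ins for what the real script pulls from its metadata.
    HASH_ALGO=sha512
    HASH="expected-hex-digest"
    OUTFILE="example.zip"

    ACTUAL_HASH=$(${HASH_ALGO}sum "$OUTFILE" | cut -d" " -f1)
    if [ "$ACTUAL_HASH" != "$HASH" ]; then
        echo "$OUTFILE did not match expected hash; expected $HASH but got $ACTUAL_HASH" >&2
        exit 1
    fi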
@@ -50,18 +52,18 @@ download_package() {
     if [ $(wc -l <<< "$DIRS") -eq 2 ]; then
         SUBDIR=$(find . -maxdepth 1 -type d -not -name ".")
         mv "$SUBDIR"/* .
-        mv "$SUBDIR"/.* . || true
+        mv "$SUBDIR"/.* . 2>/dev/null || true
         rmdir "$SUBDIR"
     fi

-    if grep -e "patches" <<< "$JSON"; then
+    if grep -e "patches" <<< "$JSON" > /dev/null; then
         PATCHES=$(jq -r '.patches | join(" ")' <<< "$JSON")
         for patch in $PATCHES; do
             patch -p1 < "$ROOTDIR"/.patch/$package/$patch
         done
     fi

-    popd
+    popd > /dev/null
 }

 ci_package() {
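The changes in this hunk are all about quieting the script: dot-file moves no longer spam errors, the grep that only feeds the if is silenced, and pushd/popd stop echoing the directory stack. The grep is used purely for its exit status, so an equivalent alternative (not what the script does, just a variant) would be its quiet flag:

    # Alternative sketch: -q suppresses output but still sets the exit status.
    if grep -q -e "patches" <<< "$JSON"; then
        PATCHES=$(jq -r '.patches | join(" ")' <<< "$JSON")
    fi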
@@ -78,12 +80,23 @@ ci_package() {

     [ "$REPO" == null ] && echo "No repo defined for CI package $package" && return

+    echo "CI package $PACKAGE"
+
     for platform in windows-amd64 windows-arm64 android solaris freebsd linux linux-aarch64; do
         FILENAME="${NAME}-${platform}-${VERSION}.${EXT}"
         DOWNLOAD="https://github.com/${REPO}/releases/download/v${VERSION}/${FILENAME}"
         PACKAGE_NAME="$PACKAGE"
         KEY=$platform
-        echo $DOWNLOAD
+
+        echo "- platform $KEY"
+
+        HASH_ALGO=$(jq -r ".hash_algo" <<< "$JSON")
+        [ "$HASH_ALGO" == null ] && HASH_ALGO=sha512
+
+        HASH_SUFFIX="${HASH_ALGO}sum"
+        HASH_URL="${DOWNLOAD}.${HASH_SUFFIX}"
+
+        HASH=$(curl "$HASH_URL" -sS -q -L -o -)
+
        download_package
    done
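For CI-built packages, each platform artifact now has its expected digest fetched from a sidecar file next to the release asset (<asset>.<algo>sum, sha512 by default), which ci_package hands to download_package for the verification shown earlier. How one such sidecar URL is derived, with made-up repo, name, version and extension:

    # Sketch: derive the artifact and sidecar URLs for one platform.
    # All identifiers here are hypothetical; the sidecar is assumed to hold
    # the bare digest, since download_package compares it directly.
    REPO="example-org/example-lib"
    NAME="example-lib"
    VERSION="1.2.3"
    EXT="tar.zst"
    platform="linux-aarch64"
    HASH_ALGO=sha512

    FILENAME="${NAME}-${platform}-${VERSION}.${EXT}"
    DOWNLOAD="https://github.com/${REPO}/releases/download/v${VERSION}/${FILENAME}"
    HASH_URL="${DOWNLOAD}.${HASH_ALGO}sum"

    HASH=$(curl -sSL "$HASH_URL" -o -)
    echo "expecting $HASH for $FILENAME"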
@@ -95,7 +108,6 @@ do
     JSON=$(find . externals src/yuzu/externals externals/ffmpeg src/dynarmic/externals externals/nx_tzdb -maxdepth 1 -name cpmfile.json -exec jq -r ".\"$package\" | select( . != null )" {} \;)

     [ -z "$JSON" ] && echo "No cpmfile definition for $package" && continue
-    echo $JSON

     PACKAGE_NAME=$(jq -r ".package" <<< "$JSON")
     [ "$PACKAGE_NAME" == null ] && PACKAGE_NAME="$package"
@@ -109,6 +121,7 @@ do
     # url parsing WOOOHOOHOHOOHOHOH
     URL=$(jq -r ".url" <<< "$JSON")
     REPO=$(jq -r ".repo" <<< "$JSON")
+    SHA=$(jq -r ".sha" <<< "$JSON")

     if [ "$URL" != "null" ]; then
         DOWNLOAD="$URL"
@@ -117,7 +130,6 @@ do

         TAG=$(jq -r ".tag" <<< "$JSON")
         ARTIFACT=$(jq -r ".artifact" <<< "$JSON")
-        SHA=$(jq -r ".sha" <<< "$JSON")
         BRANCH=$(jq -r ".branch" <<< "$JSON")

         if [ "$TAG" != "null" ]; then
@@ -159,7 +171,9 @@ do
         fi
     fi

-    echo "$package download URL: $DOWNLOAD, with key $KEY"
+    echo $KEY
+
+    echo "Downloading regular package $package, with key $KEY, from $DOWNLOAD"

     # hash parsing
     HASH_ALGO=$(jq -r ".hash_algo" <<< "$JSON")
@@ -178,10 +192,6 @@ do
         HASH=$(curl "$HASH_URL" -L -o -)
     fi

-    # TODO(crueter): Hash verification
-
-    echo "$package hash is $HASH"
-
     download_package
 done