From: drjaska
Date: Sat, 3 Jun 2023 12:43:47 +0000 (+0300)
Subject: Merge branch 'master' into drjaska/hagardamagenerf
X-Git-Url: https://git.rm.cloudns.org/?a=commitdiff_plain;h=refs%2Fmerge-requests%2F1057%2Fhead;p=xonotic%2Fxonotic-data.pk3dir.git

Merge branch 'master' into drjaska/hagardamagenerf
---

cf0046af7cc3bef6191adf3b2a2ac42e3f6b8b99
diff --cc .gitlab-ci.yml
index 93eb37a9b,fc4ce4fff..346355c61
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@@ -1,113 -1,118 +1,118 @@@
- workflow:
-   rules:
-     - if: $CI_COMMIT_MESSAGE =~ /Transifex autosync/
-       when: never
-     - when: always
-
- before_script:
-   - ln -s $PWD data/xonotic-data.pk3dir
-
-   - export MAKEFLAGS=-j$(nproc); echo MAKEFLAGS=$MAKEFLAGS
-   # FIXME: -march=native -mtune=native _changes the hash_, why?!?
-   # - export CC="gcc -pipe -march=native -mtune=native"
-   - export CC="gcc -pipe"
-
-   - >
-     if wget -nv https://beta.xonotic.org/pipeline-bin/gmqcc ; then
-       export QCC="$PWD/gmqcc"
-       chmod +x "$QCC"
-     else
-       git clone --depth=1 --branch=main https://gitlab.com/xonotic/gmqcc.git gmqcc
-       make -C gmqcc || exit 1
-       export QCC="$PWD/gmqcc/gmqcc"
-     fi
-
-   # Makefile: don't complain about lack of tags (fetching them is slow)
-   - export QCCFLAGS_WATERMARK=gitlab_pipeline
-   # Makefile: don't compress anything or complain about lack of zip program
-   - export ZIP=/bin/true
-
- test_compilation_units:
-   rules:
-     - changes:
-       - qcsrc/**/*
-   stage: test
-   script:
-     - make test
-
- test_sv_game:
-   stage: test
-   script:
-     - >
-       if wget -nv https://beta.xonotic.org/pipeline-bin/xonotic-linux64-dedicated ; then
-         export ENGINE="$PWD/xonotic-linux64-dedicated"
-         chmod +x "$ENGINE"
-       else
-         git clone --depth=1 --branch=div0-stable https://gitlab.com/xonotic/darkplaces.git darkplaces
-         make -C darkplaces sv-release || exit 1
-         export ENGINE="$PWD/darkplaces/darkplaces-dedicated -xonotic"
-       fi
-     - export ENGINE="$ENGINE -noconfig -nohome"
-
-     - make qc || exit 1
-
-     - mkdir -p data/maps
-     - wget -nv -O data/maps/_init.bsp https://gitlab.com/xonotic/xonotic-maps.pk3dir/raw/master/maps/_init/_init.bsp
-
-     - while read LINE; do
-         echo $LINE;
-         [ "$LINE" = "All tests OK" ] && PASS=1;
-       done < <(${ENGINE} +developer 1 +map _init +sv_cmd runtest +wait +quit)
-     - test "$PASS" = "1" || { echo 'sv_cmd runtest failed!'; exit 1; }
-
-     - ${ENGINE} +map _init +sv_cmd dumpnotifs +wait +quit
-     - diff notifications.cfg data/data/notifications_dump.cfg ||
-       { echo 'Please update notifications.cfg using `dumpnotifs`!'; exit 1; }
-
-     - wget -nv -O data/stormkeep.pk3 http://beta.xonotic.org/autobuild-bsp/latest/stormkeep.pk3
-     - wget -nv -O data/maps/stormkeep.mapinfo https://gitlab.com/xonotic/xonotic-maps.pk3dir/raw/master/maps/stormkeep.mapinfo
-     - wget -nv -O data/maps/stormkeep.waypoints https://gitlab.com/xonotic/xonotic-maps.pk3dir/raw/master/maps/stormkeep.waypoints
-     - wget -nv -O data/maps/stormkeep.waypoints.cache https://gitlab.com/xonotic/xonotic-maps.pk3dir/raw/master/maps/stormkeep.waypoints.cache
-
-     - EXPECT=db443b02c3b6931759e8cd867243e2b0
-     - HASH=$(${ENGINE} +timestamps 1 +exec serverbench.cfg
-       | tee /dev/stderr
-       | sed -e 's,^\[[^]]*\] ,,'
-       | grep '^:'
-       | grep -v '^:gamestart:'
-       | grep -v '^:anticheat:'
-       | md5sum | awk '{ print $1 }')
-     - echo 'expected:' $EXPECT
-     - echo ' actual:' $HASH
-     - test "$HASH" == "$EXPECT"
-     - exit $?
-
-
- # NOTE: The generated docs are incomplete - they don't contain code behind SVQC CSQC MENUQC GAMEQC ifdefs.
- # With them added to PREDEFINED, it would take over half an hour to generate the docs and even then
- # they might not be complete. Doxygen doesn't handle #elif and might not understand some QC definitions.
- #doxygen: # rename to 'pages' when gitlab.com allows pages to exceed 100MiB
- #   before_script:
- #     - ln -s $PWD data/xonotic-data.pk3dir # is this needed?
- #     - apt-get update
- #     - apt-get -y install doxygen graphviz
- #   stage: deploy
- #   script:
- #     - cd qcsrc && doxygen
- #     - mv html ../public
- #     - mkdir -p ~/.ssh
- #     - for i in {0..0}; do eval $(printf "echo \$id_rsa_%02d\n" $i) >> ~/.ssh/id_rsa_base64; done
- #     - base64 --decode ~/.ssh/id_rsa_base64 > ~/.ssh/id_rsa
- #     - chmod 600 ~/.ssh/id_rsa
- #     - echo -e "Host *\n\tStrictHostKeyChecking no\n\tLogLevel ERROR\n" >> ~/.ssh/config
- #     - git config --global user.name "Gitlab CI"
- #     - git config --global user.email "<>"
- #     - git clone --single-branch --depth 1 ${DEPLOY_HOST}:${DEPLOY_REPO} ~/deploy_
- #     - mkdir ~/deploy && mv ~/deploy_/.git ~/deploy && rm -r ~/deploy_
- #     - cp -r ../public/* ~/deploy
- #     - cd ~/deploy && git add -A . && git commit -m "Deploy ${CI_BUILD_REF}" && git push origin gh-pages
- #   artifacts:
- #     paths:
- #       - public
- #   only:
- #     - master
+ workflow:
+   rules:
+     - if: $CI_COMMIT_MESSAGE =~ /Transifex autosync/
+       when: never
+     - when: always
+
+ before_script:
+   - ln -s $PWD data/xonotic-data.pk3dir
+
+   - export MAKEFLAGS=-j$(nproc); echo MAKEFLAGS=$MAKEFLAGS
+   # FIXME: -march=native -mtune=native _changes the hash_, why?!?
+   # - export CC="gcc -pipe -march=native -mtune=native"
+   - export CC="gcc -pipe"
+
+   - >
+     if wget -nv https://beta.xonotic.org/pipeline-bin/gmqcc ; then
+       export QCC="$PWD/gmqcc"
+       chmod +x "$QCC"
+     else
+       git clone --depth=1 --branch=main https://gitlab.com/xonotic/gmqcc.git gmqcc
+       make -C gmqcc || exit 1
+       export QCC="$PWD/gmqcc/gmqcc"
+     fi
+
+   # Makefile: don't complain about lack of tags (fetching them is slow)
+   - export QCCFLAGS_WATERMARK=gitlab_pipeline
+   # Makefile: don't compress anything or complain about lack of zip program
+   - export ZIP=/bin/true
+
+ test_compilation_units:
+   rules:
+     - changes:
+       - qcsrc/**/*
+   stage: test
+   script:
+     - make test
+
+ test_sv_game:
+   stage: test
+   script:
+     - >
+       if wget -nv https://beta.xonotic.org/pipeline-bin/xonotic-linux64-dedicated ; then
+         export ENGINE="$PWD/xonotic-linux64-dedicated"
+         chmod +x "$ENGINE"
+       else
+         git clone --depth=1 --branch=div0-stable https://gitlab.com/xonotic/darkplaces.git darkplaces
+         make -C darkplaces sv-release || exit 1
+         export ENGINE="$PWD/darkplaces/darkplaces-dedicated -xonotic"
+       fi
+     - export ENGINE="$ENGINE -noconfig -nohome"
+
+     - make qc || exit 1
+
+     - mkdir -p data/maps
+     - wget -nv -O data/maps/_init.bsp https://gitlab.com/xonotic/xonotic-maps.pk3dir/raw/master/maps/_init/_init.bsp
+
+     - while read LINE; do
+         echo $LINE;
+         [ "$LINE" = "All tests OK" ] && PASS=1;
+       done < <(${ENGINE} +developer 1 +map _init +sv_cmd runtest +wait +quit)
+     - test "$PASS" = "1" || { echo 'sv_cmd runtest failed!'; exit 1; }
+
+     - ${ENGINE} +map _init +sv_cmd dumpnotifs +wait +quit
+     - diff notifications.cfg data/data/notifications_dump.cfg ||
+       { echo 'Please update notifications.cfg using `dumpnotifs`!'; exit 1; }
+
+     # - wget -nv -O data/stormkeep.pk3 http://beta.xonotic.org/autobuild-bsp/latest/stormkeep.pk3
+     # ^^ INCORRECT: /latest/stormkeep.pk3 is the most recently built, not necessarily the one built from master!
+     # we can't get the one from master directly as there's no /stable/stormkeep.pk3 or /master/stormkeep.pk3
+     # and we can't run misc/tools/xonotic-map-compiler-autobuild as it uses commit hashes from xonotic-maps.pk3dir to generate filenames
+     # but the autobuild server can run it and provide us the resulting pk3:
+     - wget -nv -O data/stormkeep.pk3 https://beta.xonotic.org/pipeline-bin/stormkeep.pk3
+     # see also: misc/infrastructure/xonotic-release-build.cron
+     - wget -nv -O data/maps/stormkeep.mapinfo https://gitlab.com/xonotic/xonotic-maps.pk3dir/raw/master/maps/stormkeep.mapinfo
+     - wget -nv -O data/maps/stormkeep.waypoints https://gitlab.com/xonotic/xonotic-maps.pk3dir/raw/master/maps/stormkeep.waypoints
+     - wget -nv -O data/maps/stormkeep.waypoints.cache https://gitlab.com/xonotic/xonotic-maps.pk3dir/raw/master/maps/stormkeep.waypoints.cache
+
 -    - EXPECT=f69ef2a13dbe594a0284660e60bdc903
++    - EXPECT=0039e0b928f10e60e580ef88a3835bc4
+     - HASH=$(${ENGINE} +exec serverbench.cfg
+       | tee /dev/stderr
+       | grep '^:'
+       | grep -v '^:gamestart:'
+       | grep -v '^:anticheat:'
+       | md5sum | awk '{ print $1 }')
+     - echo 'expected:' $EXPECT
+     - echo ' actual:' $HASH
+     - test "$HASH" == "$EXPECT"
+     - exit $?
+
+
+ # NOTE: The generated docs are incomplete - they don't contain code behind SVQC CSQC MENUQC GAMEQC ifdefs.
+ # With them added to PREDEFINED, it would take over half an hour to generate the docs and even then
+ # they might not be complete. Doxygen doesn't handle #elif and might not understand some QC definitions.
+ #doxygen: # rename to 'pages' when gitlab.com allows pages to exceed 100MiB
+ #   before_script:
+ #     - ln -s $PWD data/xonotic-data.pk3dir # is this needed?
+ #     - apt-get update
+ #     - apt-get -y install doxygen graphviz
+ #   stage: deploy
+ #   script:
+ #     - cd qcsrc && doxygen
+ #     - mv html ../public
+ #     - mkdir -p ~/.ssh
+ #     - for i in {0..0}; do eval $(printf "echo \$id_rsa_%02d\n" $i) >> ~/.ssh/id_rsa_base64; done
+ #     - base64 --decode ~/.ssh/id_rsa_base64 > ~/.ssh/id_rsa
+ #     - chmod 600 ~/.ssh/id_rsa
+ #     - echo -e "Host *\n\tStrictHostKeyChecking no\n\tLogLevel ERROR\n" >> ~/.ssh/config
+ #     - git config --global user.name "Gitlab CI"
+ #     - git config --global user.email "<>"
+ #     - git clone --single-branch --depth 1 ${DEPLOY_HOST}:${DEPLOY_REPO} ~/deploy_
+ #     - mkdir ~/deploy && mv ~/deploy_/.git ~/deploy && rm -r ~/deploy_
+ #     - cp -r ../public/* ~/deploy
+ #     - cd ~/deploy && git add -A . && git commit -m "Deploy ${CI_BUILD_REF}" && git push origin gh-pages
+ #   artifacts:
+ #     paths:
+ #       - public
+ #   only:
+ #     - master
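The runtest step in test_sv_game feeds the dedicated server's output to a while-loop via process substitution, so the PASS flag set inside the loop survives into the current shell. A minimal standalone sketch of that check (bash, because of the process substitution), assuming $ENGINE is exported the way the job above sets it and data/maps/_init.bsp is in place:

#!/bin/bash
# Sketch of the runtest check from test_sv_game: run the server once,
# echo its output, and remember whether the "All tests OK" marker appeared.
# ENGINE is assumed to be set up as in the pipeline above (binary + -noconfig -nohome).
PASS=0
while read LINE; do
    echo "$LINE"
    [ "$LINE" = "All tests OK" ] && PASS=1
done < <(${ENGINE} +developer 1 +map _init +sv_cmd runtest +wait +quit)

# Fail loudly if the marker never showed up, mirroring the CI step.
test "$PASS" = "1" || { echo 'sv_cmd runtest failed!'; exit 1; }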
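The serverbench step is a regression guard: the engine replays serverbench.cfg, only the ':'-prefixed stat lines are kept (minus the nondeterministic :gamestart: and :anticheat: ones), and their md5sum must equal EXPECT; the merge result records a new expected digest, 0039e0b928f10e60e580ef88a3835bc4, in the diff above. A standalone sketch of the same comparison, assuming $ENGINE and the data/ tree (stormkeep pk3, mapinfo, waypoints) are prepared exactly as in the job above:

#!/bin/bash
# Sketch of the serverbench regression check from test_sv_game above.
# ENGINE and the data/ tree are assumed to be prepared as in the pipeline.
EXPECT=0039e0b928f10e60e580ef88a3835bc4   # expected digest from the diff above

# Replay the benchmark config, keep only the ':' stat lines, drop the
# nondeterministic ones, and hash what remains.
HASH=$(${ENGINE} +exec serverbench.cfg \
    | tee /dev/stderr \
    | grep '^:' \
    | grep -v '^:gamestart:' \
    | grep -v '^:anticheat:' \
    | md5sum | awk '{ print $1 }')

echo "expected: $EXPECT"
echo " actual:  $HASH"
test "$HASH" == "$EXPECT"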