mirror of
https://git.sanhost.net/sanasol/hytale-f2p
synced 2026-02-26 12:51:47 -03:00
Compare commits
1 Commits
44834e7d12
...
v2.2.2
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e5b44341f1 |
2
.github/CODE_OF_CONDUCT.md
vendored
2
.github/CODE_OF_CONDUCT.md
vendored
@@ -36,7 +36,7 @@ This Code of Conduct applies within all community spaces, and also applies when
|
|||||||
|
|
||||||
## Enforcement
|
## Enforcement
|
||||||
|
|
||||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at [Discord Server, message Founders/Devs](https://discord.gg/Fhbb9Yk5WW). All complaints will be reviewed and investigated promptly and fairly.
|
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at [Discord Server, message Founders/Devs](https://discord.gg/hf2pdc). All complaints will be reviewed and investigated promptly and fairly.
|
||||||
|
|
||||||
All community leaders are obligated to respect the privacy and security of the reporter of any incident.
|
All community leaders are obligated to respect the privacy and security of the reporter of any incident.
|
||||||
|
|
||||||
|
|||||||
2
.github/ISSUE_TEMPLATE/support_request.yml
vendored
2
.github/ISSUE_TEMPLATE/support_request.yml
vendored
@@ -22,7 +22,7 @@ body:
|
|||||||
value: |
|
value: |
|
||||||
If you need help or support with using the launcher, please fill out this support request.
|
If you need help or support with using the launcher, please fill out this support request.
|
||||||
Provide as much detail as possible so we can assist you effectively.
|
Provide as much detail as possible so we can assist you effectively.
|
||||||
**Need a quick assistance?** Please Open-A-Ticket in our [Discord Server](https://discord.gg/Fhbb9Yk5WW)!
|
**Need a quick assistance?** Please Open-A-Ticket in our [Discord Server](https://discord.gg/gME8rUy3MB)!
|
||||||
|
|
||||||
- type: textarea
|
- type: textarea
|
||||||
id: question
|
id: question
|
||||||
|
|||||||
226
.github/workflows/release.yml
vendored
226
.github/workflows/release.yml
vendored
@@ -6,117 +6,201 @@ on:
|
|||||||
- 'v*'
|
- 'v*'
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
env:
|
|
||||||
# Domain for small API calls (goes through Cloudflare - fine for <100MB)
|
|
||||||
FORGEJO_API: https://git.sanhost.net/api/v1
|
|
||||||
# Direct upload URL (bypasses Cloudflare for large files) - set in repo secrets
|
|
||||||
FORGEJO_UPLOAD: ${{ secrets.FORGEJO_UPLOAD_URL }}
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
create-release:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- name: Create Draft Release
|
|
||||||
run: |
|
|
||||||
curl -s -X POST "${FORGEJO_API}/repos/${GITHUB_REPOSITORY}/releases" \
|
|
||||||
-H "Authorization: token ${{ secrets.RELEASE_TOKEN }}" \
|
|
||||||
-H "Content-Type: application/json" \
|
|
||||||
-d "{\"tag_name\":\"${{ github.ref_name }}\",\"name\":\"${{ github.ref_name }}\",\"body\":\"Release ${{ github.ref_name }}\",\"draft\":true,\"prerelease\":false}" \
|
|
||||||
-o release.json
|
|
||||||
cat release.json
|
|
||||||
echo "RELEASE_ID=$(cat release.json | python3 -c 'import sys,json; print(json.load(sys.stdin)["id"])')" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
build-windows:
|
build-windows:
|
||||||
needs: [create-release]
|
runs-on: windows-latest
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: Install Wine for cross-compilation
|
|
||||||
run: |
|
|
||||||
sudo dpkg --add-architecture i386
|
|
||||||
sudo mkdir -pm755 /etc/apt/keyrings
|
|
||||||
sudo wget -O /etc/apt/keyrings/winehq-archive.key https://dl.winehq.org/wine-builds/winehq.key
|
|
||||||
sudo wget -NP /etc/apt/sources.list.d/ https://dl.winehq.org/wine-builds/ubuntu/dists/$(lsb_release -cs)/winehq-$(lsb_release -cs).sources
|
|
||||||
sudo apt-get update
|
|
||||||
sudo apt-get install -y --install-recommends winehq-stable
|
|
||||||
- uses: actions/setup-node@v4
|
- uses: actions/setup-node@v4
|
||||||
with:
|
with:
|
||||||
node-version: '22'
|
node-version: '22'
|
||||||
|
cache: 'npm'
|
||||||
- run: npm ci
|
- run: npm ci
|
||||||
|
|
||||||
- name: Build Windows Packages
|
- name: Create Virtual .env File
|
||||||
run: npx electron-builder --win --publish never --config.npmRebuild=false
|
# Because main.js needed physical env, we need to create virtual one to store it
|
||||||
|
|
||||||
- name: Upload to Release
|
|
||||||
run: |
|
run: |
|
||||||
RELEASE_ID=$(curl -s "${FORGEJO_API}/repos/${GITHUB_REPOSITORY}/releases/tags/${{ github.ref_name }}" \
|
$env_content = @"
|
||||||
-H "Authorization: token ${{ secrets.RELEASE_TOKEN }}" | python3 -c 'import sys,json; print(json.load(sys.stdin)["id"])')
|
HF2P_PROXY_URL=${{ secrets.HF2P_PROXY_URL }}
|
||||||
for file in dist/*.exe dist/*.exe.blockmap dist/latest.yml; do
|
HF2P_SECRET_KEY=${{ secrets.HF2P_SECRET_KEY }}
|
||||||
[ -f "$file" ] || continue
|
"@
|
||||||
echo "Uploading $file..."
|
Set-Content -Path .env -Value $env_content
|
||||||
curl -s --max-time 600 -X POST "${FORGEJO_UPLOAD}/repos/${GITHUB_REPOSITORY}/releases/${RELEASE_ID}/assets?name=$(basename $file)" \
|
|
||||||
-H "Authorization: token ${{ secrets.RELEASE_TOKEN }}" \
|
- name: Build Windows Packages
|
||||||
-F "attachment=@${file}" || echo "Failed to upload $file"
|
run: npx electron-builder --win --publish never
|
||||||
done
|
- uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: windows-builds
|
||||||
|
path: |
|
||||||
|
dist/*.exe
|
||||||
|
dist/*.exe.blockmap
|
||||||
|
dist/latest.yml
|
||||||
|
|
||||||
build-macos:
|
build-macos:
|
||||||
needs: [create-release]
|
|
||||||
runs-on: macos-latest
|
runs-on: macos-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-node@v4
|
- uses: actions/setup-node@v4
|
||||||
with:
|
with:
|
||||||
node-version: '22'
|
node-version: '22'
|
||||||
|
cache: 'npm'
|
||||||
- run: npm ci
|
- run: npm ci
|
||||||
|
|
||||||
|
- name: Create Virtual .env File
|
||||||
|
run: |
|
||||||
|
cat << EOF > .env
|
||||||
|
HF2P_PROXY_URL=${{ secrets.HF2P_PROXY_URL }}
|
||||||
|
HF2P_SECRET_KEY=${{ secrets.HF2P_SECRET_KEY }}
|
||||||
|
EOF
|
||||||
|
|
||||||
- name: Build macOS Packages
|
- name: Build macOS Packages
|
||||||
env:
|
env:
|
||||||
|
# Code signing
|
||||||
CSC_LINK: ${{ secrets.CSC_LINK }}
|
CSC_LINK: ${{ secrets.CSC_LINK }}
|
||||||
CSC_KEY_PASSWORD: ${{ secrets.CSC_KEY_PASSWORD }}
|
CSC_KEY_PASSWORD: ${{ secrets.CSC_KEY_PASSWORD }}
|
||||||
|
# Notarization
|
||||||
APPLE_ID: ${{ secrets.APPLE_ID }}
|
APPLE_ID: ${{ secrets.APPLE_ID }}
|
||||||
APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
|
APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
|
||||||
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
|
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
|
||||||
run: npx electron-builder --mac --publish never
|
run: npx electron-builder --mac --publish never
|
||||||
|
- uses: actions/upload-artifact@v4
|
||||||
- name: Upload to Release
|
with:
|
||||||
run: |
|
name: macos-builds
|
||||||
RELEASE_ID=$(curl -s "${FORGEJO_API}/repos/${GITHUB_REPOSITORY}/releases/tags/${{ github.ref_name }}" \
|
path: |
|
||||||
-H "Authorization: token ${{ secrets.RELEASE_TOKEN }}" | python3 -c 'import sys,json; print(json.load(sys.stdin)["id"])')
|
dist/*.dmg
|
||||||
for file in dist/*.dmg dist/*.zip dist/*.blockmap dist/latest-mac.yml; do
|
dist/*.zip
|
||||||
[ -f "$file" ] || continue
|
dist/*.blockmap
|
||||||
echo "Uploading $file..."
|
dist/latest-mac.yml
|
||||||
curl -s --max-time 600 -X POST "${FORGEJO_UPLOAD}/repos/${GITHUB_REPOSITORY}/releases/${RELEASE_ID}/assets?name=$(basename $file)" \
|
|
||||||
-H "Authorization: token ${{ secrets.RELEASE_TOKEN }}" \
|
|
||||||
-F "attachment=@${file}" || echo "Failed to upload $file"
|
|
||||||
done
|
|
||||||
|
|
||||||
build-linux:
|
build-linux:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: [create-release]
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: Install build dependencies
|
- name: Install build dependencies
|
||||||
run: |
|
run: |
|
||||||
sudo apt-get update
|
sudo apt-get update
|
||||||
sudo apt-get install -y libarchive-tools rpm
|
sudo apt-get install -y libarchive-tools
|
||||||
|
|
||||||
- uses: actions/setup-node@v4
|
- uses: actions/setup-node@v4
|
||||||
with:
|
with:
|
||||||
node-version: '22'
|
node-version: '22'
|
||||||
|
cache: 'npm'
|
||||||
- run: npm ci
|
- run: npm ci
|
||||||
|
|
||||||
- name: Build Linux Packages
|
- name: Create Virtual .env File
|
||||||
run: npx electron-builder --linux AppImage deb rpm pacman --publish never
|
|
||||||
|
|
||||||
- name: Upload to Release
|
|
||||||
run: |
|
run: |
|
||||||
RELEASE_ID=$(curl -s "${FORGEJO_API}/repos/${GITHUB_REPOSITORY}/releases/tags/${{ github.ref_name }}" \
|
cat << EOF > .env
|
||||||
-H "Authorization: token ${{ secrets.RELEASE_TOKEN }}" | python3 -c 'import sys,json; print(json.load(sys.stdin)["id"])')
|
HF2P_PROXY_URL=${{ secrets.HF2P_PROXY_URL }}
|
||||||
for file in dist/*.AppImage dist/*.AppImage.blockmap dist/*.deb dist/*.rpm dist/*.pacman dist/latest-linux.yml; do
|
HF2P_SECRET_KEY=${{ secrets.HF2P_SECRET_KEY }}
|
||||||
[ -f "$file" ] || continue
|
EOF
|
||||||
echo "Uploading $file..."
|
|
||||||
curl -s --max-time 600 -X POST "${FORGEJO_UPLOAD}/repos/${GITHUB_REPOSITORY}/releases/${RELEASE_ID}/assets?name=$(basename $file)" \
|
- name: Build Linux Packages
|
||||||
-H "Authorization: token ${{ secrets.RELEASE_TOKEN }}" \
|
run: |
|
||||||
-F "attachment=@${file}" || echo "Failed to upload $file"
|
npx electron-builder --linux AppImage deb rpm --publish never
|
||||||
done
|
- uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: linux-builds
|
||||||
|
path: |
|
||||||
|
dist/*.AppImage
|
||||||
|
dist/*.AppImage.blockmap
|
||||||
|
dist/*.deb
|
||||||
|
dist/*.rpm
|
||||||
|
dist/latest-linux.yml
|
||||||
|
|
||||||
|
build-arch:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
container:
|
||||||
|
image: archlinux:latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Install base packages
|
||||||
|
run: |
|
||||||
|
pacman -Syu --noconfirm
|
||||||
|
pacman -S --noconfirm \
|
||||||
|
base-devel \
|
||||||
|
git \
|
||||||
|
nodejs \
|
||||||
|
npm \
|
||||||
|
rpm-tools \
|
||||||
|
libxcrypt-compat
|
||||||
|
|
||||||
|
- name: Create build user
|
||||||
|
run: |
|
||||||
|
useradd -m builder
|
||||||
|
echo "builder ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
|
||||||
|
|
||||||
|
- name: Fix Permissions
|
||||||
|
run: chown -R builder:builder .
|
||||||
|
|
||||||
|
- name: Build Arch Package
|
||||||
|
run: |
|
||||||
|
sudo -u builder bash << 'EOF'
|
||||||
|
set -e
|
||||||
|
|
||||||
|
cat << EOP > .env
|
||||||
|
HF2P_PROXY_URL=${{ secrets.HF2P_PROXY_URL }}
|
||||||
|
HF2P_SECRET_KEY=${{ secrets.HF2P_SECRET_KEY }}
|
||||||
|
EOP
|
||||||
|
|
||||||
|
makepkg --printsrcinfo > .SRCINFO
|
||||||
|
makepkg -s --noconfirm
|
||||||
|
EOF
|
||||||
|
|
||||||
|
- name: Fix permissions for upload
|
||||||
|
if: always()
|
||||||
|
run: |
|
||||||
|
sudo chown -R $(id -u):$(id -g) .
|
||||||
|
|
||||||
|
- name: Upload Arch Package
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: arch-package
|
||||||
|
path: |
|
||||||
|
*.pkg.tar.zst
|
||||||
|
.SRCINFO
|
||||||
|
|
||||||
|
release:
|
||||||
|
needs: [build-windows, build-macos, build-linux, build-arch]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: |
|
||||||
|
startsWith(github.ref, 'refs/tags/v') ||
|
||||||
|
github.ref == 'refs/heads/main' ||
|
||||||
|
github.event_name == 'workflow_dispatch'
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Download all artifacts
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
path: artifacts
|
||||||
|
|
||||||
|
- name: Display structure of downloaded files
|
||||||
|
run: ls -R artifacts
|
||||||
|
|
||||||
|
- name: Get version from package.json
|
||||||
|
id: pkg_version
|
||||||
|
run: echo "VERSION=$(node -p "require('./package.json').version")" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: Create Release
|
||||||
|
uses: softprops/action-gh-release@v2
|
||||||
|
with:
|
||||||
|
tag_name: ${{ github.ref_name }}
|
||||||
|
files: |
|
||||||
|
artifacts/arch-package/*.pkg.tar.zst
|
||||||
|
artifacts/arch-package/.SRCINFO
|
||||||
|
artifacts/linux-builds/**/*
|
||||||
|
artifacts/windows-builds/**/*
|
||||||
|
artifacts/macos-builds/**/*
|
||||||
|
generate_release_notes: true
|
||||||
|
draft: true
|
||||||
|
prerelease: false
|
||||||
|
|
||||||
|
|||||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -17,9 +17,6 @@ dist/
|
|||||||
# Project Specific: Downloaded patcher (from hytale-auth-server)
|
# Project Specific: Downloaded patcher (from hytale-auth-server)
|
||||||
backend/patcher/
|
backend/patcher/
|
||||||
|
|
||||||
# Private docs (local only)
|
|
||||||
docs/PATCH_CDN_INFRASTRUCTURE.md
|
|
||||||
|
|
||||||
# macOS Specific
|
# macOS Specific
|
||||||
.DS_Store
|
.DS_Store
|
||||||
*.zst.DS_Store
|
*.zst.DS_Store
|
||||||
|
|||||||
@@ -53,7 +53,7 @@ window.closeDiscordPopup = function() {
|
|||||||
};
|
};
|
||||||
|
|
||||||
window.joinDiscord = async function() {
|
window.joinDiscord = async function() {
|
||||||
await window.electronAPI?.openExternal('https://discord.gg/Fhbb9Yk5WW');
|
await window.electronAPI?.openExternal('https://discord.gg/hf2pdc');
|
||||||
|
|
||||||
try {
|
try {
|
||||||
await window.electronAPI?.saveConfig({ discordPopup: true });
|
await window.electronAPI?.saveConfig({ discordPopup: true });
|
||||||
|
|||||||
@@ -1103,7 +1103,7 @@ function getRetryContextMessage() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
window.openDiscordExternal = function() {
|
window.openDiscordExternal = function() {
|
||||||
window.electronAPI?.openExternal('https://discord.gg/Fhbb9Yk5WW');
|
window.electronAPI?.openExternal('https://discord.gg/hf2pdc');
|
||||||
};
|
};
|
||||||
|
|
||||||
window.toggleMaximize = toggleMaximize;
|
window.toggleMaximize = toggleMaximize;
|
||||||
|
|||||||
@@ -18,7 +18,7 @@
|
|||||||
|
|
||||||
### ⚠️ **WARNING: READ [QUICK START](#-quick-start) before Downloading & Installing the Launcher!** ⚠️
|
### ⚠️ **WARNING: READ [QUICK START](#-quick-start) before Downloading & Installing the Launcher!** ⚠️
|
||||||
|
|
||||||
#### 🛑 **Found a problem? [Join the HF2P Discord](https://discord.gg/Fhbb9Yk5WW) and head to `#-⚠️-community-help`** 🛑
|
#### 🛑 **Found a problem? [Join the HF2P Discord](https://discord.gg/hf2pdc) and head to `#-⚠️-community-help`** 🛑
|
||||||
|
|
||||||
<p>
|
<p>
|
||||||
👍 If you like the project, <b>feel free to support us via Buy Me a Coffee!</b> ☕<br>
|
👍 If you like the project, <b>feel free to support us via Buy Me a Coffee!</b> ☕<br>
|
||||||
@@ -455,7 +455,7 @@ See [BUILD.md](docs/BUILD.md) for comprehensive build instructions.
|
|||||||
<div align="center">
|
<div align="center">
|
||||||
|
|
||||||
**Questions? Ads? Collaboration? Endorsement? Other business-related?**
|
**Questions? Ads? Collaboration? Endorsement? Other business-related?**
|
||||||
Message the founders at https://discord.gg/Fhbb9Yk5WW
|
Message the founders at https://discord.gg/hf2pdc
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
|||||||
@@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
Play with friends online! This guide covers both easy in-game hosting and advanced dedicated server setup.
|
Play with friends online! This guide covers both easy in-game hosting and advanced dedicated server setup.
|
||||||
|
|
||||||
### **DOWNLOAD SERVER FILES (JAR/RAR/SCRIPTS) HERE: https://discord.gg/Fhbb9Yk5WW**
|
### **DOWNLOAD SERVER FILES (JAR/RAR/SCRIPTS) HERE: https://discord.gg/hf2pdc**
|
||||||
|
|
||||||
**Table of Contents**
|
**Table of Contents**
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
# Hytale F2P Launcher - Troubleshooting Guide
|
# Hytale F2P Launcher - Troubleshooting Guide
|
||||||
|
|
||||||
This guide covers common issues and their solutions. If your issue isn't listed here, please check [existing issues](https://github.com/amiayweb/Hytale-F2P/issues) or join our [Discord](https://discord.gg/Fhbb9Yk5WW).
|
This guide covers common issues and their solutions. If your issue isn't listed here, please check [existing issues](https://github.com/amiayweb/Hytale-F2P/issues) or join our [Discord](https://discord.gg/gME8rUy3MB).
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -437,7 +437,7 @@ Game sessions have a 10-hour TTL. This is by design for security.
|
|||||||
If your issue isn't resolved by this guide:
|
If your issue isn't resolved by this guide:
|
||||||
|
|
||||||
1. **Check existing issues:** [GitHub Issues](https://github.com/amiayweb/Hytale-F2P/issues)
|
1. **Check existing issues:** [GitHub Issues](https://github.com/amiayweb/Hytale-F2P/issues)
|
||||||
2. **Join Discord:** [discord.gg/Fhbb9Yk5WW](https://discord.gg/Fhbb9Yk5WW)
|
2. **Join Discord:** [discord.gg/gME8rUy3MB](https://discord.gg/gME8rUy3MB)
|
||||||
3. **Open a new issue** with:
|
3. **Open a new issue** with:
|
||||||
- Your operating system and version
|
- Your operating system and version
|
||||||
- Launcher version
|
- Launcher version
|
||||||
|
|||||||
@@ -4,10 +4,6 @@ const logger = require('./logger');
|
|||||||
const fs = require('fs');
|
const fs = require('fs');
|
||||||
const path = require('path');
|
const path = require('path');
|
||||||
const os = require('os');
|
const os = require('os');
|
||||||
const https = require('https');
|
|
||||||
|
|
||||||
const FORGEJO_API = 'https://git.sanhost.net/api/v1';
|
|
||||||
const FORGEJO_REPO = 'sanasol/hytale-f2p';
|
|
||||||
|
|
||||||
class AppUpdater {
|
class AppUpdater {
|
||||||
constructor(mainWindow) {
|
constructor(mainWindow) {
|
||||||
@@ -18,34 +14,6 @@ class AppUpdater {
|
|||||||
this.setupAutoUpdater();
|
this.setupAutoUpdater();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Fetch the latest non-draft release tag from Forgejo and set the feed URL
|
|
||||||
*/
|
|
||||||
async _resolveUpdateUrl() {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
https.get(`${FORGEJO_API}/repos/${FORGEJO_REPO}/releases?limit=5`, (res) => {
|
|
||||||
let data = '';
|
|
||||||
res.on('data', (chunk) => data += chunk);
|
|
||||||
res.on('end', () => {
|
|
||||||
try {
|
|
||||||
const releases = JSON.parse(data);
|
|
||||||
const latest = releases.find(r => !r.draft && !r.prerelease);
|
|
||||||
if (latest) {
|
|
||||||
const url = `https://git.sanhost.net/${FORGEJO_REPO}/releases/download/${latest.tag_name}`;
|
|
||||||
console.log(`Auto-update URL resolved to: ${url}`);
|
|
||||||
autoUpdater.setFeedURL({ provider: 'generic', url });
|
|
||||||
resolve(url);
|
|
||||||
} else {
|
|
||||||
reject(new Error('No published release found'));
|
|
||||||
}
|
|
||||||
} catch (e) {
|
|
||||||
reject(e);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}).on('error', reject);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
setupAutoUpdater() {
|
setupAutoUpdater() {
|
||||||
|
|
||||||
// Configure logger for electron-updater
|
// Configure logger for electron-updater
|
||||||
@@ -248,10 +216,8 @@ class AppUpdater {
|
|||||||
}
|
}
|
||||||
|
|
||||||
checkForUpdatesAndNotify() {
|
checkForUpdatesAndNotify() {
|
||||||
// Resolve latest release URL then check for updates
|
// Check for updates and notify if available
|
||||||
this._resolveUpdateUrl().catch(err => {
|
autoUpdater.checkForUpdatesAndNotify().catch(err => {
|
||||||
console.warn('Failed to resolve update URL:', err.message);
|
|
||||||
}).then(() => autoUpdater.checkForUpdatesAndNotify()).catch(err => {
|
|
||||||
console.error('Failed to check for updates:', err);
|
console.error('Failed to check for updates:', err);
|
||||||
|
|
||||||
// Network errors are not critical - just log and continue
|
// Network errors are not critical - just log and continue
|
||||||
@@ -279,10 +245,8 @@ class AppUpdater {
|
|||||||
}
|
}
|
||||||
|
|
||||||
checkForUpdates() {
|
checkForUpdates() {
|
||||||
// Manual check - resolve latest release URL first
|
// Manual check for updates (returns promise)
|
||||||
return this._resolveUpdateUrl().catch(err => {
|
return autoUpdater.checkForUpdates().catch(err => {
|
||||||
console.warn('Failed to resolve update URL:', err.message);
|
|
||||||
}).then(() => autoUpdater.checkForUpdates()).catch(err => {
|
|
||||||
console.error('Failed to check for updates:', err);
|
console.error('Failed to check for updates:', err);
|
||||||
|
|
||||||
// Network errors are not critical - just return no update available
|
// Network errors are not critical - just return no update available
|
||||||
|
|||||||
@@ -54,7 +54,6 @@ function getAppDir() {
|
|||||||
const CONFIG_FILE = path.join(getAppDir(), 'config.json');
|
const CONFIG_FILE = path.join(getAppDir(), 'config.json');
|
||||||
const CONFIG_BACKUP = path.join(getAppDir(), 'config.json.bak');
|
const CONFIG_BACKUP = path.join(getAppDir(), 'config.json.bak');
|
||||||
const CONFIG_TEMP = path.join(getAppDir(), 'config.json.tmp');
|
const CONFIG_TEMP = path.join(getAppDir(), 'config.json.tmp');
|
||||||
const UUID_STORE_FILE = path.join(getAppDir(), 'uuid-store.json');
|
|
||||||
|
|
||||||
// =============================================================================
|
// =============================================================================
|
||||||
// CONFIG VALIDATION
|
// CONFIG VALIDATION
|
||||||
@@ -153,22 +152,6 @@ function saveConfig(update) {
|
|||||||
|
|
||||||
// Load current config
|
// Load current config
|
||||||
const currentConfig = loadConfig();
|
const currentConfig = loadConfig();
|
||||||
|
|
||||||
// SAFETY: If config file exists on disk but loadConfig() returned empty,
|
|
||||||
// something is wrong (file locked, corrupted, etc.). Refuse to save
|
|
||||||
// because merging with {} would wipe all existing data (userUuids, username, etc.)
|
|
||||||
if (Object.keys(currentConfig).length === 0 && fs.existsSync(CONFIG_FILE)) {
|
|
||||||
const fileSize = fs.statSync(CONFIG_FILE).size;
|
|
||||||
if (fileSize > 2) { // More than just "{}"
|
|
||||||
console.error(`[Config] REFUSING to save — loaded empty but file exists (${fileSize} bytes). Retrying load...`);
|
|
||||||
// Wait and retry the load
|
|
||||||
const delay = attempt * 200;
|
|
||||||
const start = Date.now();
|
|
||||||
while (Date.now() - start < delay) { /* busy wait */ }
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const newConfig = { ...currentConfig, ...update };
|
const newConfig = { ...currentConfig, ...update };
|
||||||
const data = JSON.stringify(newConfig, null, 2);
|
const data = JSON.stringify(newConfig, null, 2);
|
||||||
|
|
||||||
@@ -255,18 +238,11 @@ function saveUsername(username) {
|
|||||||
// Check if we're actually changing the username (case-insensitive comparison)
|
// Check if we're actually changing the username (case-insensitive comparison)
|
||||||
const isRename = currentName && currentName.toLowerCase() !== newName.toLowerCase();
|
const isRename = currentName && currentName.toLowerCase() !== newName.toLowerCase();
|
||||||
|
|
||||||
// Also update UUID store (source of truth)
|
|
||||||
migrateUuidStoreIfNeeded();
|
|
||||||
const uuidStore = loadUuidStore();
|
|
||||||
|
|
||||||
if (isRename) {
|
if (isRename) {
|
||||||
// Find the UUID for the current username
|
// Find the UUID for the current username
|
||||||
const currentKey = Object.keys(userUuids).find(
|
const currentKey = Object.keys(userUuids).find(
|
||||||
k => k.toLowerCase() === currentName.toLowerCase()
|
k => k.toLowerCase() === currentName.toLowerCase()
|
||||||
);
|
);
|
||||||
const currentStoreKey = Object.keys(uuidStore).find(
|
|
||||||
k => k.toLowerCase() === currentName.toLowerCase()
|
|
||||||
);
|
|
||||||
|
|
||||||
if (currentKey && userUuids[currentKey]) {
|
if (currentKey && userUuids[currentKey]) {
|
||||||
// Check if target username already exists (would be a different identity)
|
// Check if target username already exists (would be a different identity)
|
||||||
@@ -282,9 +258,6 @@ function saveUsername(username) {
|
|||||||
const uuid = userUuids[currentKey];
|
const uuid = userUuids[currentKey];
|
||||||
delete userUuids[currentKey];
|
delete userUuids[currentKey];
|
||||||
userUuids[newName] = uuid;
|
userUuids[newName] = uuid;
|
||||||
// Same in UUID store
|
|
||||||
if (currentStoreKey) delete uuidStore[currentStoreKey];
|
|
||||||
uuidStore[newName] = uuid;
|
|
||||||
console.log(`[Config] Renamed identity: "${currentKey}" → "${newName}" (UUID preserved: ${uuid})`);
|
console.log(`[Config] Renamed identity: "${currentKey}" → "${newName}" (UUID preserved: ${uuid})`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -297,20 +270,11 @@ function saveUsername(username) {
|
|||||||
const uuid = userUuids[currentKey];
|
const uuid = userUuids[currentKey];
|
||||||
delete userUuids[currentKey];
|
delete userUuids[currentKey];
|
||||||
userUuids[newName] = uuid;
|
userUuids[newName] = uuid;
|
||||||
// Same in UUID store
|
|
||||||
const storeKey = Object.keys(uuidStore).find(k => k.toLowerCase() === currentName.toLowerCase());
|
|
||||||
if (storeKey) {
|
|
||||||
delete uuidStore[storeKey];
|
|
||||||
uuidStore[newName] = uuid;
|
|
||||||
}
|
|
||||||
console.log(`[Config] Updated username case: "${currentKey}" → "${newName}"`);
|
console.log(`[Config] Updated username case: "${currentKey}" → "${newName}"`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Save UUID store
|
// Save both username and updated userUuids
|
||||||
saveUuidStore(uuidStore);
|
|
||||||
|
|
||||||
// Save both username and updated userUuids to config
|
|
||||||
saveConfig({ username: newName, userUuids });
|
saveConfig({ username: newName, userUuids });
|
||||||
console.log(`[Config] Username saved: "${newName}"`);
|
console.log(`[Config] Username saved: "${newName}"`);
|
||||||
return newName;
|
return newName;
|
||||||
@@ -346,7 +310,6 @@ function hasUsername() {
|
|||||||
|
|
||||||
// =============================================================================
|
// =============================================================================
|
||||||
// UUID MANAGEMENT - Persistent and safe
|
// UUID MANAGEMENT - Persistent and safe
|
||||||
// Uses separate uuid-store.json as source of truth (survives config.json corruption)
|
|
||||||
// =============================================================================
|
// =============================================================================
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -357,55 +320,10 @@ function normalizeUsername(username) {
|
|||||||
return username.trim().toLowerCase();
|
return username.trim().toLowerCase();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Load UUID store from separate file (independent of config.json)
|
|
||||||
*/
|
|
||||||
function loadUuidStore() {
|
|
||||||
try {
|
|
||||||
if (fs.existsSync(UUID_STORE_FILE)) {
|
|
||||||
const data = fs.readFileSync(UUID_STORE_FILE, 'utf8');
|
|
||||||
if (data.trim()) {
|
|
||||||
return JSON.parse(data);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
console.error('[UUID Store] Failed to load:', err.message);
|
|
||||||
}
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Save UUID store to separate file (atomic write)
|
|
||||||
*/
|
|
||||||
function saveUuidStore(store) {
|
|
||||||
try {
|
|
||||||
const dir = path.dirname(UUID_STORE_FILE);
|
|
||||||
if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true });
|
|
||||||
const tmpFile = UUID_STORE_FILE + '.tmp';
|
|
||||||
fs.writeFileSync(tmpFile, JSON.stringify(store, null, 2), 'utf8');
|
|
||||||
fs.renameSync(tmpFile, UUID_STORE_FILE);
|
|
||||||
} catch (err) {
|
|
||||||
console.error('[UUID Store] Failed to save:', err.message);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* One-time migration: copy userUuids from config.json to uuid-store.json
|
|
||||||
*/
|
|
||||||
function migrateUuidStoreIfNeeded() {
|
|
||||||
if (fs.existsSync(UUID_STORE_FILE)) return; // Already migrated
|
|
||||||
const config = loadConfig();
|
|
||||||
if (config.userUuids && Object.keys(config.userUuids).length > 0) {
|
|
||||||
console.log('[UUID Store] Migrating', Object.keys(config.userUuids).length, 'UUIDs from config.json');
|
|
||||||
saveUuidStore(config.userUuids);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get UUID for a username
|
* Get UUID for a username
|
||||||
* Source of truth: uuid-store.json (separate from config.json)
|
* Creates new UUID only if user explicitly doesn't exist
|
||||||
* Also writes to config.json for backward compatibility
|
* Uses case-insensitive lookup to prevent duplicates, but preserves original case for display
|
||||||
* Creates new UUID only if user doesn't exist in EITHER store
|
|
||||||
*/
|
*/
|
||||||
function getUuidForUser(username) {
|
function getUuidForUser(username) {
|
||||||
const { v4: uuidv4 } = require('uuid');
|
const { v4: uuidv4 } = require('uuid');
|
||||||
@@ -417,69 +335,32 @@ function getUuidForUser(username) {
|
|||||||
const displayName = username.trim();
|
const displayName = username.trim();
|
||||||
const normalizedLookup = displayName.toLowerCase();
|
const normalizedLookup = displayName.toLowerCase();
|
||||||
|
|
||||||
// Ensure UUID store exists (one-time migration from config.json)
|
|
||||||
migrateUuidStoreIfNeeded();
|
|
||||||
|
|
||||||
// 1. Check UUID store first (source of truth)
|
|
||||||
const uuidStore = loadUuidStore();
|
|
||||||
const storeKey = Object.keys(uuidStore).find(k => k.toLowerCase() === normalizedLookup);
|
|
||||||
|
|
||||||
if (storeKey) {
|
|
||||||
const existingUuid = uuidStore[storeKey];
|
|
||||||
|
|
||||||
// Update case if needed
|
|
||||||
if (storeKey !== displayName) {
|
|
||||||
console.log(`[UUID Store] Updating username case: "${storeKey}" → "${displayName}"`);
|
|
||||||
delete uuidStore[storeKey];
|
|
||||||
uuidStore[displayName] = existingUuid;
|
|
||||||
saveUuidStore(uuidStore);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sync to config.json (backward compat, non-critical)
|
|
||||||
try {
|
|
||||||
const config = loadConfig();
|
const config = loadConfig();
|
||||||
const configUuids = config.userUuids || {};
|
const userUuids = config.userUuids || {};
|
||||||
const configKey = Object.keys(configUuids).find(k => k.toLowerCase() === normalizedLookup);
|
|
||||||
if (!configKey || configUuids[configKey] !== existingUuid) {
|
// Case-insensitive lookup - find existing key regardless of case
|
||||||
if (configKey) delete configUuids[configKey];
|
const existingKey = Object.keys(userUuids).find(k => k.toLowerCase() === normalizedLookup);
|
||||||
configUuids[displayName] = existingUuid;
|
|
||||||
saveConfig({ userUuids: configUuids });
|
if (existingKey) {
|
||||||
}
|
// Found existing - return UUID, update display name if case changed
|
||||||
} catch (e) {
|
const existingUuid = userUuids[existingKey];
|
||||||
// Non-critical — UUID store is the source of truth
|
|
||||||
|
// If user typed different case, update the key to new case (preserving UUID)
|
||||||
|
if (existingKey !== displayName) {
|
||||||
|
console.log(`[Config] Updating username case: "${existingKey}" → "${displayName}"`);
|
||||||
|
delete userUuids[existingKey];
|
||||||
|
userUuids[displayName] = existingUuid;
|
||||||
|
saveConfig({ userUuids });
|
||||||
}
|
}
|
||||||
|
|
||||||
console.log(`[UUID] ${displayName} → ${existingUuid} (from uuid-store)`);
|
|
||||||
return existingUuid;
|
return existingUuid;
|
||||||
}
|
}
|
||||||
|
|
||||||
// 2. Fallback: check config.json (recovery if uuid-store.json was lost)
|
// Create new UUID for new user - store with original case
|
||||||
const config = loadConfig();
|
|
||||||
const userUuids = config.userUuids || {};
|
|
||||||
const configKey = Object.keys(userUuids).find(k => k.toLowerCase() === normalizedLookup);
|
|
||||||
|
|
||||||
if (configKey) {
|
|
||||||
const recoveredUuid = userUuids[configKey];
|
|
||||||
console.warn(`[UUID] RECOVERED "${displayName}" → ${recoveredUuid} from config.json (uuid-store was missing)`);
|
|
||||||
|
|
||||||
// Save to UUID store
|
|
||||||
uuidStore[displayName] = recoveredUuid;
|
|
||||||
saveUuidStore(uuidStore);
|
|
||||||
|
|
||||||
return recoveredUuid;
|
|
||||||
}
|
|
||||||
|
|
||||||
// 3. New user — generate UUID, save to BOTH stores
|
|
||||||
const newUuid = uuidv4();
|
const newUuid = uuidv4();
|
||||||
console.log(`[UUID] NEW user "${displayName}" → ${newUuid}`);
|
|
||||||
|
|
||||||
// Save to UUID store (source of truth)
|
|
||||||
uuidStore[displayName] = newUuid;
|
|
||||||
saveUuidStore(uuidStore);
|
|
||||||
|
|
||||||
// Save to config.json (backward compat)
|
|
||||||
userUuids[displayName] = newUuid;
|
userUuids[displayName] = newUuid;
|
||||||
saveConfig({ userUuids });
|
saveConfig({ userUuids });
|
||||||
|
console.log(`[Config] Created new UUID for "${displayName}": ${newUuid}`);
|
||||||
|
|
||||||
return newUuid;
|
return newUuid;
|
||||||
}
|
}
|
||||||
@@ -499,26 +380,22 @@ function getCurrentUuid() {
|
|||||||
* Get all UUID mappings (raw object)
|
* Get all UUID mappings (raw object)
|
||||||
*/
|
*/
|
||||||
function getAllUuidMappings() {
|
function getAllUuidMappings() {
|
||||||
migrateUuidStoreIfNeeded();
|
|
||||||
const uuidStore = loadUuidStore();
|
|
||||||
// Fallback to config if uuid-store is empty
|
|
||||||
if (Object.keys(uuidStore).length === 0) {
|
|
||||||
const config = loadConfig();
|
const config = loadConfig();
|
||||||
return config.userUuids || {};
|
return config.userUuids || {};
|
||||||
}
|
|
||||||
return uuidStore;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get all UUID mappings as array with current user flag
|
* Get all UUID mappings as array with current user flag
|
||||||
*/
|
*/
|
||||||
function getAllUuidMappingsArray() {
|
function getAllUuidMappingsArray() {
|
||||||
const allMappings = getAllUuidMappings();
|
const config = loadConfig();
|
||||||
|
const userUuids = config.userUuids || {};
|
||||||
const currentUsername = loadUsername();
|
const currentUsername = loadUsername();
|
||||||
|
// Case-insensitive comparison for isCurrent
|
||||||
const normalizedCurrent = currentUsername ? currentUsername.toLowerCase() : null;
|
const normalizedCurrent = currentUsername ? currentUsername.toLowerCase() : null;
|
||||||
|
|
||||||
return Object.entries(allMappings).map(([username, uuid]) => ({
|
return Object.entries(userUuids).map(([username, uuid]) => ({
|
||||||
username,
|
username, // Original case preserved
|
||||||
uuid,
|
uuid,
|
||||||
isCurrent: username.toLowerCase() === normalizedCurrent
|
isCurrent: username.toLowerCase() === normalizedCurrent
|
||||||
}));
|
}));
|
||||||
@@ -542,20 +419,16 @@ function setUuidForUser(username, uuid) {
|
|||||||
|
|
||||||
const displayName = username.trim();
|
const displayName = username.trim();
|
||||||
const normalizedLookup = displayName.toLowerCase();
|
const normalizedLookup = displayName.toLowerCase();
|
||||||
|
|
||||||
// 1. Update UUID store (source of truth)
|
|
||||||
migrateUuidStoreIfNeeded();
|
|
||||||
const uuidStore = loadUuidStore();
|
|
||||||
const storeKey = Object.keys(uuidStore).find(k => k.toLowerCase() === normalizedLookup);
|
|
||||||
if (storeKey) delete uuidStore[storeKey];
|
|
||||||
uuidStore[displayName] = uuid;
|
|
||||||
saveUuidStore(uuidStore);
|
|
||||||
|
|
||||||
// 2. Update config.json (backward compat)
|
|
||||||
const config = loadConfig();
|
const config = loadConfig();
|
||||||
const userUuids = config.userUuids || {};
|
const userUuids = config.userUuids || {};
|
||||||
|
|
||||||
|
// Remove any existing entry with same name (case-insensitive)
|
||||||
const existingKey = Object.keys(userUuids).find(k => k.toLowerCase() === normalizedLookup);
|
const existingKey = Object.keys(userUuids).find(k => k.toLowerCase() === normalizedLookup);
|
||||||
if (existingKey) delete userUuids[existingKey];
|
if (existingKey) {
|
||||||
|
delete userUuids[existingKey];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store with original case
|
||||||
userUuids[displayName] = uuid;
|
userUuids[displayName] = uuid;
|
||||||
saveConfig({ userUuids });
|
saveConfig({ userUuids });
|
||||||
|
|
||||||
@@ -581,30 +454,20 @@ function deleteUuidForUser(username) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const normalizedLookup = username.trim().toLowerCase();
|
const normalizedLookup = username.trim().toLowerCase();
|
||||||
let deleted = false;
|
|
||||||
|
|
||||||
// 1. Delete from UUID store (source of truth)
|
|
||||||
migrateUuidStoreIfNeeded();
|
|
||||||
const uuidStore = loadUuidStore();
|
|
||||||
const storeKey = Object.keys(uuidStore).find(k => k.toLowerCase() === normalizedLookup);
|
|
||||||
if (storeKey) {
|
|
||||||
delete uuidStore[storeKey];
|
|
||||||
saveUuidStore(uuidStore);
|
|
||||||
deleted = true;
|
|
||||||
}
|
|
||||||
|
|
||||||
// 2. Delete from config.json (backward compat)
|
|
||||||
const config = loadConfig();
|
const config = loadConfig();
|
||||||
const userUuids = config.userUuids || {};
|
const userUuids = config.userUuids || {};
|
||||||
|
|
||||||
|
// Case-insensitive lookup
|
||||||
const existingKey = Object.keys(userUuids).find(k => k.toLowerCase() === normalizedLookup);
|
const existingKey = Object.keys(userUuids).find(k => k.toLowerCase() === normalizedLookup);
|
||||||
|
|
||||||
if (existingKey) {
|
if (existingKey) {
|
||||||
delete userUuids[existingKey];
|
delete userUuids[existingKey];
|
||||||
saveConfig({ userUuids });
|
saveConfig({ userUuids });
|
||||||
deleted = true;
|
console.log(`[Config] UUID deleted for "${username}"`);
|
||||||
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (deleted) console.log(`[Config] UUID deleted for "${username}"`);
|
return false;
|
||||||
return deleted;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -925,6 +788,5 @@ module.exports = {
|
|||||||
loadVersionBranch,
|
loadVersionBranch,
|
||||||
|
|
||||||
// Constants
|
// Constants
|
||||||
CONFIG_FILE,
|
CONFIG_FILE
|
||||||
UUID_STORE_FILE
|
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ const path = require('path');
|
|||||||
const { execFile } = require('child_process');
|
const { execFile } = require('child_process');
|
||||||
const { downloadFile, retryDownload } = require('../utils/fileManager');
|
const { downloadFile, retryDownload } = require('../utils/fileManager');
|
||||||
const { getOS, getArch } = require('../utils/platformUtils');
|
const { getOS, getArch } = require('../utils/platformUtils');
|
||||||
const { validateChecksum, extractVersionDetails, getInstalledClientVersion, getUpdatePlan, extractVersionNumber, getAllMirrorUrls, getPatchesBaseUrl } = require('../services/versionManager');
|
const { validateChecksum, extractVersionDetails, canUseDifferentialUpdate, needsIntermediatePatches, getInstalledClientVersion } = require('../services/versionManager');
|
||||||
const { installButler } = require('./butlerManager');
|
const { installButler } = require('./butlerManager');
|
||||||
const { GAME_DIR, CACHE_DIR, TOOLS_DIR } = require('../core/paths');
|
const { GAME_DIR, CACHE_DIR, TOOLS_DIR } = require('../core/paths');
|
||||||
const { saveVersionClient } = require('../core/config');
|
const { saveVersionClient } = require('../core/config');
|
||||||
@@ -31,62 +31,15 @@ async function acquireGameArchive(downloadUrl, targetPath, checksum, progressCal
|
|||||||
|
|
||||||
console.log(`Downloading game archive from: ${downloadUrl}`);
|
console.log(`Downloading game archive from: ${downloadUrl}`);
|
||||||
|
|
||||||
// Try primary URL first, then mirror URLs on timeout/connection failure
|
|
||||||
const mirrors = await getAllMirrorUrls();
|
|
||||||
const primaryBase = await getPatchesBaseUrl();
|
|
||||||
const urlsToTry = [downloadUrl];
|
|
||||||
|
|
||||||
// Build mirror URLs by replacing the base URL
|
|
||||||
for (const mirror of mirrors) {
|
|
||||||
if (mirror !== primaryBase && downloadUrl.startsWith(primaryBase)) {
|
|
||||||
const mirrorUrl = downloadUrl.replace(primaryBase, mirror);
|
|
||||||
if (!urlsToTry.includes(mirrorUrl)) {
|
|
||||||
urlsToTry.push(mirrorUrl);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let lastError;
|
|
||||||
for (let i = 0; i < urlsToTry.length; i++) {
|
|
||||||
const url = urlsToTry[i];
|
|
||||||
try {
|
try {
|
||||||
if (i > 0) {
|
|
||||||
console.log(`[Download] Trying mirror ${i}: ${url}`);
|
|
||||||
if (progressCallback) {
|
|
||||||
progressCallback(`Trying alternative mirror (${i}/${urlsToTry.length - 1})...`, 0, null, null, null);
|
|
||||||
}
|
|
||||||
// Clean up partial download from previous attempt
|
|
||||||
if (fs.existsSync(targetPath)) {
|
|
||||||
try { fs.unlinkSync(targetPath); } catch (e) {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (allowRetry) {
|
if (allowRetry) {
|
||||||
await retryDownload(url, targetPath, progressCallback);
|
await retryDownload(downloadUrl, targetPath, progressCallback);
|
||||||
} else {
|
} else {
|
||||||
await downloadFile(url, targetPath, progressCallback);
|
await downloadFile(downloadUrl, targetPath, progressCallback);
|
||||||
}
|
}
|
||||||
lastError = null;
|
|
||||||
break; // Success
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
lastError = error;
|
const enhancedError = new Error(`Archive download failed: ${error.message}`);
|
||||||
const isConnectionError = error.message && (
|
enhancedError.originalError = error;
|
||||||
error.message.includes('ETIMEDOUT') ||
|
|
||||||
error.message.includes('ECONNREFUSED') ||
|
|
||||||
error.message.includes('ECONNABORTED') ||
|
|
||||||
error.message.includes('timeout')
|
|
||||||
);
|
|
||||||
if (isConnectionError && i < urlsToTry.length - 1) {
|
|
||||||
console.warn(`[Download] Connection failed (${error.message}), will try mirror...`);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
// Non-connection error or last mirror — throw
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (lastError) {
|
|
||||||
const enhancedError = new Error(`Archive download failed: ${lastError.message}`);
|
|
||||||
enhancedError.originalError = lastError;
|
|
||||||
enhancedError.downloadUrl = downloadUrl;
|
enhancedError.downloadUrl = downloadUrl;
|
||||||
enhancedError.targetPath = targetPath;
|
enhancedError.targetPath = targetPath;
|
||||||
throw enhancedError;
|
throw enhancedError;
|
||||||
@@ -203,15 +156,15 @@ async function performIntelligentUpdate(targetVersion, branch = 'release', progr
|
|||||||
console.log(`Initiating intelligent update to version ${targetVersion}`);
|
console.log(`Initiating intelligent update to version ${targetVersion}`);
|
||||||
|
|
||||||
const currentVersion = getInstalledClientVersion();
|
const currentVersion = getInstalledClientVersion();
|
||||||
const currentBuild = extractVersionNumber(currentVersion) || 0;
|
console.log(`Current version: ${currentVersion || 'none (clean install)'}`);
|
||||||
const targetBuild = extractVersionNumber(targetVersion);
|
console.log(`Target version: ${targetVersion}`);
|
||||||
console.log(`Current build: ${currentBuild}, Target build: ${targetBuild}, Branch: ${branch}`);
|
console.log(`Branch: ${branch}`);
|
||||||
|
|
||||||
// For non-release branches, always do full install
|
|
||||||
if (branch !== 'release') {
|
if (branch !== 'release') {
|
||||||
console.log('Pre-release branch detected - forcing full archive download');
|
console.log(`Pre-release branch detected - forcing full archive download`);
|
||||||
const versionDetails = await extractVersionDetails(targetVersion, branch);
|
const versionDetails = await extractVersionDetails(targetVersion, branch);
|
||||||
const archivePath = path.join(cacheDir, `${branch}_0_to_${targetBuild}.pwr`);
|
const archiveName = path.basename(versionDetails.fullUrl);
|
||||||
|
const archivePath = path.join(cacheDir, `${branch}_${archiveName}`);
|
||||||
|
|
||||||
if (progressCallback) {
|
if (progressCallback) {
|
||||||
progressCallback('Downloading full game archive (pre-release)...', 0, null, null, null);
|
progressCallback('Downloading full game archive (pre-release)...', 0, null, null, null);
|
||||||
@@ -224,14 +177,14 @@ async function performIntelligentUpdate(targetVersion, branch = 'release', progr
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Clean install (no current version)
|
if (!currentVersion) {
|
||||||
if (currentBuild === 0) {
|
|
||||||
console.log('No existing installation detected - downloading full archive');
|
console.log('No existing installation detected - downloading full archive');
|
||||||
const versionDetails = await extractVersionDetails(targetVersion, branch);
|
const versionDetails = await extractVersionDetails(targetVersion, branch);
|
||||||
const archivePath = path.join(cacheDir, `${branch}_0_to_${targetBuild}.pwr`);
|
const archiveName = path.basename(versionDetails.fullUrl);
|
||||||
|
const archivePath = path.join(cacheDir, `${branch}_${archiveName}`);
|
||||||
|
|
||||||
if (progressCallback) {
|
if (progressCallback) {
|
||||||
progressCallback(`Downloading full game archive (first install - v${targetBuild})...`, 0, null, null, null);
|
progressCallback(`Downloading full game archive (first install - v${targetVersion})...`, 0, null, null, null);
|
||||||
}
|
}
|
||||||
|
|
||||||
await acquireGameArchive(versionDetails.fullUrl, archivePath, null, progressCallback);
|
await acquireGameArchive(versionDetails.fullUrl, archivePath, null, progressCallback);
|
||||||
@@ -241,67 +194,59 @@ async function performIntelligentUpdate(targetVersion, branch = 'release', progr
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Already at target
|
const patchesToApply = needsIntermediatePatches(currentVersion, targetVersion);
|
||||||
if (currentBuild >= targetBuild) {
|
|
||||||
console.log('Already at target version or newer');
|
if (patchesToApply.length === 0) {
|
||||||
|
console.log('Already at target version or invalid version sequence');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Use mirror's update plan for optimal patch routing
|
console.log(`Applying ${patchesToApply.length} differential patch(es): ${patchesToApply.join(' -> ')}`);
|
||||||
try {
|
|
||||||
const plan = await getUpdatePlan(currentBuild, targetBuild, branch);
|
|
||||||
|
|
||||||
console.log(`Applying ${plan.steps.length} patch(es): ${plan.steps.map(s => `${s.from}\u2192${s.to}`).join(' + ')}`);
|
for (let i = 0; i < patchesToApply.length; i++) {
|
||||||
|
const patchVersion = patchesToApply[i];
|
||||||
|
const versionDetails = await extractVersionDetails(patchVersion, branch);
|
||||||
|
|
||||||
for (let i = 0; i < plan.steps.length; i++) {
|
const canDifferential = canUseDifferentialUpdate(getInstalledClientVersion(), versionDetails);
|
||||||
const step = plan.steps[i];
|
|
||||||
const stepName = `${step.from}_to_${step.to}`;
|
if (!canDifferential || !versionDetails.differentialUrl) {
|
||||||
const archivePath = path.join(cacheDir, `${branch}_${stepName}.pwr`);
|
console.log(`WARNING: Differential patch not available for ${patchVersion}, using full archive`);
|
||||||
const isDifferential = step.from !== 0;
|
const archiveName = path.basename(versionDetails.fullUrl);
|
||||||
|
const archivePath = path.join(cacheDir, `${branch}_${archiveName}`);
|
||||||
|
|
||||||
if (progressCallback) {
|
if (progressCallback) {
|
||||||
progressCallback(`Downloading patch ${i + 1}/${plan.steps.length}: ${stepName}...`, 0, null, null, null);
|
progressCallback(`Downloading full archive for ${patchVersion} (${i + 1}/${patchesToApply.length})...`, 0, null, null, null);
|
||||||
}
|
|
||||||
|
|
||||||
await acquireGameArchive(step.url, archivePath, null, progressCallback);
|
|
||||||
|
|
||||||
if (progressCallback) {
|
|
||||||
progressCallback(`Applying patch ${i + 1}/${plan.steps.length}: ${stepName}...`, 50, null, null, null);
|
|
||||||
}
|
|
||||||
|
|
||||||
await deployGameArchive(archivePath, gameDir, toolsDir, progressCallback, isDifferential);
|
|
||||||
|
|
||||||
// Clean up patch file
|
|
||||||
if (fs.existsSync(archivePath)) {
|
|
||||||
try {
|
|
||||||
fs.unlinkSync(archivePath);
|
|
||||||
console.log(`Cleaned up: ${stepName}.pwr`);
|
|
||||||
} catch (cleanupErr) {
|
|
||||||
console.warn(`Failed to cleanup: ${cleanupErr.message}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
saveVersionClient(`v${step.to}`);
|
|
||||||
console.log(`Patch ${stepName} applied (${i + 1}/${plan.steps.length})`);
|
|
||||||
}
|
|
||||||
|
|
||||||
console.log(`Update completed. Version ${targetVersion} is now installed.`);
|
|
||||||
} catch (planError) {
|
|
||||||
console.error('Update plan failed:', planError.message);
|
|
||||||
console.log('Falling back to full archive download');
|
|
||||||
|
|
||||||
// Fallback: full install
|
|
||||||
const versionDetails = await extractVersionDetails(targetVersion, branch);
|
|
||||||
const archivePath = path.join(cacheDir, `${branch}_0_to_${targetBuild}.pwr`);
|
|
||||||
|
|
||||||
if (progressCallback) {
|
|
||||||
progressCallback(`Downloading full game archive (fallback)...`, 0, null, null, null);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
await acquireGameArchive(versionDetails.fullUrl, archivePath, null, progressCallback);
|
await acquireGameArchive(versionDetails.fullUrl, archivePath, null, progressCallback);
|
||||||
await deployGameArchive(archivePath, gameDir, toolsDir, progressCallback, false);
|
await deployGameArchive(archivePath, gameDir, toolsDir, progressCallback, false);
|
||||||
saveVersionClient(targetVersion);
|
} else {
|
||||||
|
console.log(`Applying differential patch: ${versionDetails.sourceVersion} -> ${patchVersion}`);
|
||||||
|
const archiveName = path.basename(versionDetails.differentialUrl);
|
||||||
|
const archivePath = path.join(cacheDir, `${branch}_patch_${archiveName}`);
|
||||||
|
|
||||||
|
if (progressCallback) {
|
||||||
|
progressCallback(`Applying patch ${i + 1}/${patchesToApply.length}: ${patchVersion}...`, 0, null, null, null);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
await acquireGameArchive(versionDetails.differentialUrl, archivePath, versionDetails.checksum, progressCallback);
|
||||||
|
await deployGameArchive(archivePath, gameDir, toolsDir, progressCallback, true);
|
||||||
|
|
||||||
|
if (fs.existsSync(archivePath)) {
|
||||||
|
try {
|
||||||
|
fs.unlinkSync(archivePath);
|
||||||
|
console.log(`Cleaned up patch file: ${archiveName}`);
|
||||||
|
} catch (cleanupErr) {
|
||||||
|
console.warn(`Failed to cleanup patch file: ${cleanupErr.message}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
saveVersionClient(patchVersion);
|
||||||
|
console.log(`Patch ${patchVersion} applied successfully (${i + 1}/${patchesToApply.length})`);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`Update completed successfully. Version ${targetVersion} is now installed.`);
|
||||||
}
|
}
|
||||||
|
|
||||||
async function ensureGameInstalled(targetVersion, branch = 'release', progressCallback, gameDir = GAME_DIR, cacheDir = CACHE_DIR, toolsDir = TOOLS_DIR) {
|
async function ensureGameInstalled(targetVersion, branch = 'release', progressCallback, gameDir = GAME_DIR, cacheDir = CACHE_DIR, toolsDir = TOOLS_DIR) {
|
||||||
|
|||||||
@@ -61,39 +61,12 @@ async function fetchAuthTokens(uuid, name) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const data = await response.json();
|
const data = await response.json();
|
||||||
const identityToken = data.IdentityToken || data.identityToken;
|
|
||||||
const sessionToken = data.SessionToken || data.sessionToken;
|
|
||||||
|
|
||||||
// Verify the identity token has the correct username
|
|
||||||
// This catches cases where the auth server defaults to "Player"
|
|
||||||
try {
|
|
||||||
const parts = identityToken.split('.');
|
|
||||||
if (parts.length >= 2) {
|
|
||||||
const payload = JSON.parse(Buffer.from(parts[1], 'base64url').toString());
|
|
||||||
if (payload.username && payload.username !== name && name !== 'Player') {
|
|
||||||
console.warn(`[Auth] Token username mismatch: token has "${payload.username}", expected "${name}". Retrying...`);
|
|
||||||
// Retry once with explicit name
|
|
||||||
const retryResponse = await fetch(`${authServerUrl}/game-session/child`, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ uuid: uuid, name: name, scopes: ['hytale:server', 'hytale:client'] })
|
|
||||||
});
|
|
||||||
if (retryResponse.ok) {
|
|
||||||
const retryData = await retryResponse.json();
|
|
||||||
console.log('[Auth] Retry successful');
|
|
||||||
return {
|
|
||||||
identityToken: retryData.IdentityToken || retryData.identityToken,
|
|
||||||
sessionToken: retryData.SessionToken || retryData.sessionToken
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (verifyErr) {
|
|
||||||
console.warn('[Auth] Token verification skipped:', verifyErr.message);
|
|
||||||
}
|
|
||||||
|
|
||||||
console.log('Auth tokens received from server');
|
console.log('Auth tokens received from server');
|
||||||
return { identityToken, sessionToken };
|
|
||||||
|
return {
|
||||||
|
identityToken: data.IdentityToken || data.identityToken,
|
||||||
|
sessionToken: data.SessionToken || data.sessionToken
|
||||||
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('Failed to fetch auth tokens:', error.message);
|
console.error('Failed to fetch auth tokens:', error.message);
|
||||||
// Fallback to local generation if server unavailable
|
// Fallback to local generation if server unavailable
|
||||||
@@ -250,7 +223,6 @@ async function launchGame(playerNameOverride = null, progressCallback, javaPathO
|
|||||||
}
|
}
|
||||||
|
|
||||||
const uuid = getUuidForUser(playerName);
|
const uuid = getUuidForUser(playerName);
|
||||||
console.log(`[Launcher] UUID for "${playerName}": ${uuid} (verify this stays constant across launches)`);
|
|
||||||
|
|
||||||
// Fetch tokens from auth server
|
// Fetch tokens from auth server
|
||||||
if (progressCallback) {
|
if (progressCallback) {
|
||||||
@@ -440,7 +412,7 @@ exec "$REAL_JAVA" "\${ARGS[@]}"
|
|||||||
// This enables runtime auth patching without modifying the server JAR
|
// This enables runtime auth patching without modifying the server JAR
|
||||||
const agentJar = path.join(gameLatest, 'Server', 'dualauth-agent.jar');
|
const agentJar = path.join(gameLatest, 'Server', 'dualauth-agent.jar');
|
||||||
if (fs.existsSync(agentJar)) {
|
if (fs.existsSync(agentJar)) {
|
||||||
const agentFlag = `-javaagent:"${agentJar}"`;
|
const agentFlag = `-javaagent:${agentJar}`;
|
||||||
env.JAVA_TOOL_OPTIONS = env.JAVA_TOOL_OPTIONS
|
env.JAVA_TOOL_OPTIONS = env.JAVA_TOOL_OPTIONS
|
||||||
? `${env.JAVA_TOOL_OPTIONS} ${agentFlag}`
|
? `${env.JAVA_TOOL_OPTIONS} ${agentFlag}`
|
||||||
: agentFlag;
|
: agentFlag;
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ const { promisify } = require('util');
|
|||||||
const { getResolvedAppDir, findClientPath, findUserDataPath, findUserDataRecursive, GAME_DIR, CACHE_DIR, TOOLS_DIR } = require('../core/paths');
|
const { getResolvedAppDir, findClientPath, findUserDataPath, findUserDataRecursive, GAME_DIR, CACHE_DIR, TOOLS_DIR } = require('../core/paths');
|
||||||
const { getOS, getArch } = require('../utils/platformUtils');
|
const { getOS, getArch } = require('../utils/platformUtils');
|
||||||
const { downloadFile, retryDownload, retryStalledDownload, MAX_AUTOMATIC_STALL_RETRIES } = require('../utils/fileManager');
|
const { downloadFile, retryDownload, retryStalledDownload, MAX_AUTOMATIC_STALL_RETRIES } = require('../utils/fileManager');
|
||||||
const { getLatestClientVersion, getInstalledClientVersion, getUpdatePlan, extractVersionNumber } = require('../services/versionManager');
|
const { getLatestClientVersion, getInstalledClientVersion } = require('../services/versionManager');
|
||||||
const { FORCE_CLEAN_INSTALL_VERSION, CLEAN_INSTALL_TEST_VERSION } = require('../core/testConfig');
|
const { FORCE_CLEAN_INSTALL_VERSION, CLEAN_INSTALL_TEST_VERSION } = require('../core/testConfig');
|
||||||
const { installButler } = require('./butlerManager');
|
const { installButler } = require('./butlerManager');
|
||||||
const { downloadAndReplaceHomePageUI, downloadAndReplaceLogo } = require('./uiFileManager');
|
const { downloadAndReplaceHomePageUI, downloadAndReplaceLogo } = require('./uiFileManager');
|
||||||
@@ -64,7 +64,7 @@ async function safeRemoveDirectory(dirPath, maxRetries = 3) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async function downloadPWR(branch = 'release', fileName = 'v8', progressCallback, cacheDir = CACHE_DIR, manualRetry = false, directUrl = null, expectedSize = null) {
|
async function downloadPWR(branch = 'release', fileName = 'v8', progressCallback, cacheDir = CACHE_DIR, manualRetry = false) {
|
||||||
const osName = getOS();
|
const osName = getOS();
|
||||||
const arch = getArch();
|
const arch = getArch();
|
||||||
|
|
||||||
@@ -72,69 +72,43 @@ async function downloadPWR(branch = 'release', fileName = 'v8', progressCallback
|
|||||||
throw new Error('Hytale x86_64 Intel Mac Support has not been released yet. Please check back later.');
|
throw new Error('Hytale x86_64 Intel Mac Support has not been released yet. Please check back later.');
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const { getPWRUrlFromNewAPI } = require('../services/versionManager');
|
||||||
|
|
||||||
let url;
|
let url;
|
||||||
|
let isUsingNewAPI = false;
|
||||||
|
|
||||||
if (directUrl) {
|
|
||||||
url = directUrl;
|
|
||||||
console.log(`[DownloadPWR] Using direct URL: ${url}`);
|
|
||||||
} else {
|
|
||||||
const { getPWRUrl } = require('../services/versionManager');
|
|
||||||
try {
|
try {
|
||||||
console.log(`[DownloadPWR] Fetching mirror URL for branch: ${branch}, version: ${fileName}`);
|
console.log(`[DownloadPWR] Fetching URL from new API for branch: ${branch}, version: ${fileName}`);
|
||||||
url = await getPWRUrl(branch, fileName);
|
url = await getPWRUrlFromNewAPI(branch, fileName);
|
||||||
console.log(`[DownloadPWR] Mirror URL: ${url}`);
|
isUsingNewAPI = true;
|
||||||
|
console.log(`[DownloadPWR] Using new API URL: ${url}`);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error(`[DownloadPWR] Failed to get mirror URL: ${error.message}`);
|
console.error(`[DownloadPWR] Failed to get URL from new API: ${error.message}`);
|
||||||
const { getPatchesBaseUrl } = require('../services/versionManager');
|
console.log(`[DownloadPWR] Falling back to old URL format`);
|
||||||
const baseUrl = await getPatchesBaseUrl();
|
url = `https://game-patches.hytale.com/patches/${osName}/${arch}/${branch}/0/${fileName}.pwr`;
|
||||||
url = `${baseUrl}/${osName}/${arch}/${branch}/0_to_${extractVersionNumber(fileName)}.pwr`;
|
|
||||||
console.log(`[DownloadPWR] Fallback URL: ${url}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Look up expected file size from manifest if not provided
|
|
||||||
if (!expectedSize) {
|
|
||||||
try {
|
|
||||||
const { fetchMirrorManifest } = require('../services/versionManager');
|
|
||||||
const manifest = await fetchMirrorManifest();
|
|
||||||
// Try to match: "0_to_11" format or "v11" format
|
|
||||||
const versionMatch = fileName.match(/^(\d+)_to_(\d+)$/);
|
|
||||||
let manifestKey;
|
|
||||||
if (versionMatch) {
|
|
||||||
manifestKey = `${osName}/${arch}/${branch}/${fileName}.pwr`;
|
|
||||||
} else {
|
|
||||||
const buildNum = extractVersionNumber(fileName);
|
|
||||||
manifestKey = `${osName}/${arch}/${branch}/0_to_${buildNum}.pwr`;
|
|
||||||
}
|
|
||||||
if (manifest.files[manifestKey]) {
|
|
||||||
expectedSize = manifest.files[manifestKey].size;
|
|
||||||
console.log(`[PWR] Expected size from manifest: ${(expectedSize / 1024 / 1024).toFixed(2)} MB`);
|
|
||||||
}
|
|
||||||
} catch (e) {
|
|
||||||
console.log(`[PWR] Could not fetch expected size from manifest: ${e.message}`);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const dest = path.join(cacheDir, `${branch}_${fileName}.pwr`);
|
const dest = path.join(cacheDir, `${branch}_${fileName}.pwr`);
|
||||||
|
|
||||||
// Check if file exists and validate it
|
// Check if file exists and validate it
|
||||||
if (fs.existsSync(dest) && !manualRetry) {
|
if (fs.existsSync(dest) && !manualRetry) {
|
||||||
|
console.log('PWR file found in cache:', dest);
|
||||||
|
|
||||||
|
// Validate file size (PWR files should be > 1MB and >= 1.5GB for complete downloads)
|
||||||
const stats = fs.statSync(dest);
|
const stats = fs.statSync(dest);
|
||||||
if (stats.size > 1024 * 1024) {
|
if (stats.size < 1024 * 1024) {
|
||||||
// Validate against expected size - reject if file is truncated (< 99% of expected)
|
return false;
|
||||||
if (expectedSize && stats.size < expectedSize * 0.99) {
|
|
||||||
console.log(`[PWR] Cached file truncated: ${(stats.size / 1024 / 1024).toFixed(2)} MB, expected ${(expectedSize / 1024 / 1024).toFixed(2)} MB. Deleting and re-downloading.`);
|
|
||||||
fs.unlinkSync(dest);
|
|
||||||
} else {
|
|
||||||
console.log(`[PWR] Using cached file: ${dest} (${(stats.size / 1024 / 1024).toFixed(2)} MB)`);
|
|
||||||
return dest;
|
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
console.log(`[PWR] Cached file too small (${stats.size} bytes), re-downloading`);
|
// Check if file is under 1.5 GB (incomplete download)
|
||||||
|
const sizeInMB = stats.size / 1024 / 1024;
|
||||||
|
if (sizeInMB < 1500) {
|
||||||
|
console.log(`[PWR Validation] File appears incomplete: ${sizeInMB.toFixed(2)} MB < 1.5 GB`);
|
||||||
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
console.log(`[DownloadPWR] Downloading from: ${url}`);
|
console.log(`Fetching PWR patch file from ${isUsingNewAPI ? 'NEW API' : 'old API'}:`, url);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
if (manualRetry) {
|
if (manualRetry) {
|
||||||
@@ -160,7 +134,7 @@ async function downloadPWR(branch = 'release', fileName = 'v8', progressCallback
|
|||||||
const retryStats = fs.statSync(dest);
|
const retryStats = fs.statSync(dest);
|
||||||
console.log(`PWR file downloaded (auto-retry), size: ${(retryStats.size / 1024 / 1024).toFixed(2)} MB`);
|
console.log(`PWR file downloaded (auto-retry), size: ${(retryStats.size / 1024 / 1024).toFixed(2)} MB`);
|
||||||
|
|
||||||
if (!validatePWRFile(dest, expectedSize)) {
|
if (!validatePWRFile(dest)) {
|
||||||
console.log(`[PWR Validation] PWR file validation failed after auto-retry, deleting corrupted file: ${dest}`);
|
console.log(`[PWR Validation] PWR file validation failed after auto-retry, deleting corrupted file: ${dest}`);
|
||||||
fs.unlinkSync(dest);
|
fs.unlinkSync(dest);
|
||||||
throw new Error('Downloaded PWR file is corrupted or invalid after automatic retry. Please retry manually');
|
throw new Error('Downloaded PWR file is corrupted or invalid after automatic retry. Please retry manually');
|
||||||
@@ -211,7 +185,7 @@ async function downloadPWR(branch = 'release', fileName = 'v8', progressCallback
|
|||||||
const stats = fs.statSync(dest);
|
const stats = fs.statSync(dest);
|
||||||
console.log(`PWR file downloaded, size: ${(stats.size / 1024 / 1024).toFixed(2)} MB`);
|
console.log(`PWR file downloaded, size: ${(stats.size / 1024 / 1024).toFixed(2)} MB`);
|
||||||
|
|
||||||
if (!validatePWRFile(dest, expectedSize)) {
|
if (!validatePWRFile(dest)) {
|
||||||
console.log(`[PWR Validation] PWR file validation failed, deleting corrupted file: ${dest}`);
|
console.log(`[PWR Validation] PWR file validation failed, deleting corrupted file: ${dest}`);
|
||||||
fs.unlinkSync(dest);
|
fs.unlinkSync(dest);
|
||||||
throw new Error('Downloaded PWR file is corrupted or invalid. Please retry');
|
throw new Error('Downloaded PWR file is corrupted or invalid. Please retry');
|
||||||
@@ -229,7 +203,7 @@ async function retryPWRDownload(branch, fileName, progressCallback, cacheDir = C
|
|||||||
return await downloadPWR(branch, fileName, progressCallback, cacheDir, true);
|
return await downloadPWR(branch, fileName, progressCallback, cacheDir, true);
|
||||||
}
|
}
|
||||||
|
|
||||||
async function applyPWR(pwrFile, progressCallback, gameDir = GAME_DIR, toolsDir = TOOLS_DIR, branch = 'release', cacheDir = CACHE_DIR, skipExistingCheck = false) {
|
async function applyPWR(pwrFile, progressCallback, gameDir = GAME_DIR, toolsDir = TOOLS_DIR, branch = 'release', cacheDir = CACHE_DIR) {
|
||||||
console.log(`[Butler] Starting PWR application with:`);
|
console.log(`[Butler] Starting PWR application with:`);
|
||||||
console.log(`[Butler] - PWR file: ${pwrFile}`);
|
console.log(`[Butler] - PWR file: ${pwrFile}`);
|
||||||
console.log(`[Butler] - Staging dir: ${path.join(gameDir, 'staging-temp')}`);
|
console.log(`[Butler] - Staging dir: ${path.join(gameDir, 'staging-temp')}`);
|
||||||
@@ -253,13 +227,12 @@ async function applyPWR(pwrFile, progressCallback, gameDir = GAME_DIR, toolsDir
|
|||||||
const gameLatest = gameDir;
|
const gameLatest = gameDir;
|
||||||
const stagingDir = path.join(gameLatest, 'staging-temp');
|
const stagingDir = path.join(gameLatest, 'staging-temp');
|
||||||
|
|
||||||
if (!skipExistingCheck) {
|
|
||||||
const clientPath = findClientPath(gameLatest);
|
const clientPath = findClientPath(gameLatest);
|
||||||
|
|
||||||
if (clientPath) {
|
if (clientPath) {
|
||||||
console.log('Game files detected, skipping patch installation.');
|
console.log('Game files detected, skipping patch installation.');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
// Validate and prepare directories
|
// Validate and prepare directories
|
||||||
validateGameDirectory(gameLatest, stagingDir);
|
validateGameDirectory(gameLatest, stagingDir);
|
||||||
@@ -439,65 +412,6 @@ async function updateGameFiles(newVersion, progressCallback, gameDir = GAME_DIR,
|
|||||||
}
|
}
|
||||||
console.log(`Updating game files to version: ${newVersion} (branch: ${branch})`);
|
console.log(`Updating game files to version: ${newVersion} (branch: ${branch})`);
|
||||||
|
|
||||||
// Determine update strategy: intermediate patches vs full reinstall
|
|
||||||
const currentVersion = loadVersionClient();
|
|
||||||
const currentBuild = extractVersionNumber(currentVersion) || 0;
|
|
||||||
const targetBuild = extractVersionNumber(newVersion);
|
|
||||||
|
|
||||||
let useIntermediatePatches = false;
|
|
||||||
let updatePlan = null;
|
|
||||||
|
|
||||||
if (currentBuild > 0 && currentBuild < targetBuild) {
|
|
||||||
try {
|
|
||||||
updatePlan = await getUpdatePlan(currentBuild, targetBuild, branch);
|
|
||||||
useIntermediatePatches = !updatePlan.isFullInstall;
|
|
||||||
if (useIntermediatePatches) {
|
|
||||||
const totalMB = (updatePlan.totalSize / 1024 / 1024).toFixed(0);
|
|
||||||
console.log(`[UpdateGameFiles] Using intermediate patches: ${updatePlan.steps.map(s => `${s.from}\u2192${s.to}`).join(' + ')} (${totalMB} MB)`);
|
|
||||||
}
|
|
||||||
} catch (planError) {
|
|
||||||
console.warn('[UpdateGameFiles] Could not get update plan, falling back to full install:', planError.message);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (useIntermediatePatches && updatePlan) {
|
|
||||||
// Apply intermediate patches directly to game dir
|
|
||||||
for (let i = 0; i < updatePlan.steps.length; i++) {
|
|
||||||
const step = updatePlan.steps[i];
|
|
||||||
const stepName = `${step.from}_to_${step.to}`;
|
|
||||||
|
|
||||||
if (progressCallback) {
|
|
||||||
const progress = 20 + Math.round((i / updatePlan.steps.length) * 60);
|
|
||||||
progressCallback(`Downloading patch ${i + 1}/${updatePlan.steps.length} (${stepName})...`, progress, null, null, null);
|
|
||||||
}
|
|
||||||
|
|
||||||
const pwrFile = await downloadPWR(branch, stepName, progressCallback, cacheDir, false, step.url, step.size);
|
|
||||||
|
|
||||||
if (!pwrFile) {
|
|
||||||
throw new Error(`Failed to download patch ${stepName}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (progressCallback) {
|
|
||||||
progressCallback(`Applying patch ${i + 1}/${updatePlan.steps.length} (${stepName})...`, null, null, null, null);
|
|
||||||
}
|
|
||||||
|
|
||||||
await applyPWR(pwrFile, progressCallback, gameDir, toolsDir, branch, cacheDir, true);
|
|
||||||
|
|
||||||
// Clean up PWR file from cache
|
|
||||||
try {
|
|
||||||
if (fs.existsSync(pwrFile)) {
|
|
||||||
fs.unlinkSync(pwrFile);
|
|
||||||
}
|
|
||||||
} catch (delErr) {
|
|
||||||
console.warn('[UpdateGameFiles] Failed to delete PWR from cache:', delErr.message);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Save intermediate version so we can resume if interrupted
|
|
||||||
saveVersionClient(`v${step.to}`);
|
|
||||||
console.log(`[UpdateGameFiles] Applied patch ${stepName} (${i + 1}/${updatePlan.steps.length})`);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// Full install: download 0->target, apply to temp dir, swap
|
|
||||||
tempUpdateDir = path.join(gameDir, '..', 'temp_update');
|
tempUpdateDir = path.join(gameDir, '..', 'temp_update');
|
||||||
|
|
||||||
if (fs.existsSync(tempUpdateDir)) {
|
if (fs.existsSync(tempUpdateDir)) {
|
||||||
@@ -516,7 +430,7 @@ async function updateGameFiles(newVersion, progressCallback, gameDir = GAME_DIR,
|
|||||||
}
|
}
|
||||||
|
|
||||||
await applyPWR(pwrFile, progressCallback, tempUpdateDir, toolsDir, branch, cacheDir);
|
await applyPWR(pwrFile, progressCallback, tempUpdateDir, toolsDir, branch, cacheDir);
|
||||||
|
// Delete PWR file from cache after successful update
|
||||||
try {
|
try {
|
||||||
if (fs.existsSync(pwrFile)) {
|
if (fs.existsSync(pwrFile)) {
|
||||||
fs.unlinkSync(pwrFile);
|
fs.unlinkSync(pwrFile);
|
||||||
@@ -525,7 +439,6 @@ async function updateGameFiles(newVersion, progressCallback, gameDir = GAME_DIR,
|
|||||||
} catch (delErr) {
|
} catch (delErr) {
|
||||||
console.warn('[UpdateGameFiles] Failed to delete PWR file from cache:', delErr.message);
|
console.warn('[UpdateGameFiles] Failed to delete PWR file from cache:', delErr.message);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (progressCallback) {
|
if (progressCallback) {
|
||||||
progressCallback('Replacing game files...', 80, null, null, null);
|
progressCallback('Replacing game files...', 80, null, null, null);
|
||||||
}
|
}
|
||||||
@@ -550,7 +463,6 @@ async function updateGameFiles(newVersion, progressCallback, gameDir = GAME_DIR,
|
|||||||
}
|
}
|
||||||
|
|
||||||
fs.renameSync(tempUpdateDir, gameDir);
|
fs.renameSync(tempUpdateDir, gameDir);
|
||||||
}
|
|
||||||
|
|
||||||
const homeUIResult = await downloadAndReplaceHomePageUI(gameDir, progressCallback);
|
const homeUIResult = await downloadAndReplaceHomePageUI(gameDir, progressCallback);
|
||||||
console.log('HomePage.ui update result after update:', homeUIResult);
|
console.log('HomePage.ui update result after update:', homeUIResult);
|
||||||
@@ -921,8 +833,7 @@ function validateGameDirectory(gameDir, stagingDir) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Enhanced PWR file validation
|
// Enhanced PWR file validation
|
||||||
// Accepts intermediate patches (50+ MB) and full installs (1.5+ GB)
|
function validatePWRFile(filePath) {
|
||||||
function validatePWRFile(filePath, expectedSize = null) {
|
|
||||||
try {
|
try {
|
||||||
if (!fs.existsSync(filePath)) {
|
if (!fs.existsSync(filePath)) {
|
||||||
return false;
|
return false;
|
||||||
@@ -931,20 +842,27 @@ function validatePWRFile(filePath, expectedSize = null) {
|
|||||||
const stats = fs.statSync(filePath);
|
const stats = fs.statSync(filePath);
|
||||||
const sizeInMB = stats.size / 1024 / 1024;
|
const sizeInMB = stats.size / 1024 / 1024;
|
||||||
|
|
||||||
// PWR files should be at least 1 MB
|
|
||||||
if (stats.size < 1024 * 1024) {
|
if (stats.size < 1024 * 1024) {
|
||||||
console.log(`[PWR Validation] File too small: ${sizeInMB.toFixed(2)} MB`);
|
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Validate against expected size if known (reject if < 99% of expected)
|
// Check if file is under 1.5 GB (incomplete download)
|
||||||
if (expectedSize && stats.size < expectedSize * 0.99) {
|
if (sizeInMB < 1500) {
|
||||||
const expectedMB = expectedSize / 1024 / 1024;
|
console.log(`[PWR Validation] File appears incomplete: ${sizeInMB.toFixed(2)} MB < 1.5 GB`);
|
||||||
console.log(`[PWR Validation] File truncated: ${sizeInMB.toFixed(2)} MB, expected ${expectedMB.toFixed(2)} MB`);
|
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
console.log(`[PWR Validation] File size: ${sizeInMB.toFixed(2)} MB - OK`);
|
// Basic file header validation (PWR files should have specific headers)
|
||||||
|
const buffer = fs.readFileSync(filePath, { start: 0, end: 20 });
|
||||||
|
if (buffer.length < 10) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for common PWR magic bytes or patterns
|
||||||
|
// This is a basic check - could be enhanced with actual PWR format specification
|
||||||
|
const header = buffer.toString('hex', 0, 10);
|
||||||
|
console.log(`[PWR Validation] File header: ${header}`);
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error(`[PWR Validation] Error:`, error.message);
|
console.error(`[PWR Validation] Error:`, error.message);
|
||||||
|
|||||||
@@ -1,500 +1,287 @@
|
|||||||
const axios = require('axios');
|
const axios = require('axios');
|
||||||
const crypto = require('crypto');
|
const crypto = require('crypto');
|
||||||
const fs = require('fs');
|
const fs = require('fs');
|
||||||
const path = require('path');
|
|
||||||
const { getOS, getArch } = require('../utils/platformUtils');
|
const { getOS, getArch } = require('../utils/platformUtils');
|
||||||
|
const { smartRequest } = require('../utils/proxyClient');
|
||||||
|
|
||||||
// Patches base URL fetched dynamically via multi-source fallback chain
|
const BASE_PATCH_URL = 'https://game-patches.hytale.com/patches';
|
||||||
const AUTH_DOMAIN = process.env.HYTALE_AUTH_DOMAIN || 'auth.sanasol.ws';
|
const MANIFEST_API = 'https://files.hytalef2p.com/api/patch_manifest';
|
||||||
const PATCHES_CONFIG_SOURCES = [
|
const NEW_API_URL = 'https://thecute.cloud/ShipOfYarn/api.php';
|
||||||
{ type: 'http', url: `https://${AUTH_DOMAIN}/api/patches-config`, name: 'primary' },
|
|
||||||
{ type: 'http', url: 'https://htdwnldsan.top/patches-config', name: 'backup-1' },
|
|
||||||
{ type: 'http', url: 'https://dl1.htdwnldsan.top/patches-config', name: 'backup-2' },
|
|
||||||
{ type: 'doh', name: '_patches.htdwnldsan.top', name_label: 'dns-txt' },
|
|
||||||
];
|
|
||||||
const HARDCODED_FALLBACK = 'https://dl.vboro.de/patches';
|
|
||||||
|
|
||||||
// Alternative mirrors (non-Cloudflare) for regions where CF is blocked
|
let apiCache = null;
|
||||||
const NON_CF_MIRRORS = [
|
let apiCacheTime = 0;
|
||||||
'https://dl1.htdwnldsan.top',
|
const API_CACHE_DURATION = 60000; // 1 minute
|
||||||
'https://htdwnldsan.top/patches',
|
|
||||||
];
|
|
||||||
|
|
||||||
// Fallback: latest known build number if manifest is unreachable
|
async function fetchNewAPI() {
|
||||||
const FALLBACK_LATEST_BUILD = 11;
|
|
||||||
|
|
||||||
let patchesBaseUrl = null;
|
|
||||||
let patchesConfigTime = 0;
|
|
||||||
const PATCHES_CONFIG_CACHE_DURATION = 300000; // 5 minutes
|
|
||||||
|
|
||||||
let manifestCache = null;
|
|
||||||
let manifestCacheTime = 0;
|
|
||||||
const MANIFEST_CACHE_DURATION = 60000; // 1 minute
|
|
||||||
|
|
||||||
// Disk cache path for patches URL (survives restarts)
|
|
||||||
function getDiskCachePath() {
|
|
||||||
const os = require('os');
|
|
||||||
const home = os.homedir();
|
|
||||||
let appDir;
|
|
||||||
if (process.platform === 'win32') {
|
|
||||||
appDir = path.join(home, 'AppData', 'Local', 'HytaleF2P');
|
|
||||||
} else if (process.platform === 'darwin') {
|
|
||||||
appDir = path.join(home, 'Library', 'Application Support', 'HytaleF2P');
|
|
||||||
} else {
|
|
||||||
appDir = path.join(home, '.hytalef2p');
|
|
||||||
}
|
|
||||||
return path.join(appDir, 'patches-url-cache.json');
|
|
||||||
}
|
|
||||||
|
|
||||||
function saveDiskCache(url) {
|
|
||||||
try {
|
|
||||||
const cachePath = getDiskCachePath();
|
|
||||||
const dir = path.dirname(cachePath);
|
|
||||||
if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true });
|
|
||||||
fs.writeFileSync(cachePath, JSON.stringify({ patches_url: url, ts: Date.now() }), 'utf8');
|
|
||||||
} catch (e) {
|
|
||||||
// Non-critical, ignore
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function loadDiskCache() {
|
|
||||||
try {
|
|
||||||
const cachePath = getDiskCachePath();
|
|
||||||
if (fs.existsSync(cachePath)) {
|
|
||||||
const data = JSON.parse(fs.readFileSync(cachePath, 'utf8'));
|
|
||||||
if (data && data.patches_url) return data.patches_url;
|
|
||||||
}
|
|
||||||
} catch (e) {
|
|
||||||
// Non-critical, ignore
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Fetch patches URL from a single HTTP config endpoint
|
|
||||||
*/
|
|
||||||
async function fetchFromHttp(url) {
|
|
||||||
const response = await axios.get(url, {
|
|
||||||
timeout: 8000,
|
|
||||||
headers: { 'User-Agent': 'Hytale-F2P-Launcher' }
|
|
||||||
});
|
|
||||||
if (response.data && response.data.patches_url) {
|
|
||||||
return response.data.patches_url.replace(/\/+$/, '');
|
|
||||||
}
|
|
||||||
throw new Error('Invalid response');
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Fetch patches URL from DNS TXT record via DNS-over-HTTPS
|
|
||||||
*/
|
|
||||||
async function fetchFromDoh(recordName) {
|
|
||||||
const dohEndpoints = [
|
|
||||||
{ url: 'https://dns.google/resolve', params: { name: recordName, type: 'TXT' } },
|
|
||||||
{ url: 'https://cloudflare-dns.com/dns-query', params: { name: recordName, type: 'TXT' }, headers: { 'Accept': 'application/dns-json' } },
|
|
||||||
];
|
|
||||||
|
|
||||||
for (const endpoint of dohEndpoints) {
|
|
||||||
try {
|
|
||||||
const response = await axios.get(endpoint.url, {
|
|
||||||
params: endpoint.params,
|
|
||||||
headers: { 'User-Agent': 'Hytale-F2P-Launcher', ...(endpoint.headers || {}) },
|
|
||||||
timeout: 5000
|
|
||||||
});
|
|
||||||
const answers = response.data && response.data.Answer;
|
|
||||||
if (answers && answers.length > 0) {
|
|
||||||
// TXT records are quoted, strip quotes
|
|
||||||
const txt = answers[0].data.replace(/^"|"$/g, '');
|
|
||||||
if (txt.startsWith('http')) return txt.replace(/\/+$/, '');
|
|
||||||
}
|
|
||||||
} catch (e) {
|
|
||||||
// Try next DoH endpoint
|
|
||||||
}
|
|
||||||
}
|
|
||||||
throw new Error('All DoH endpoints failed');
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Fetch patches base URL with hardened multi-source fallback chain:
|
|
||||||
* 1. Memory cache (5 min)
|
|
||||||
* 2. HTTP: auth.sanasol.ws (primary)
|
|
||||||
* 3. HTTP: htdwnldsan.top (backup, different host/domain/registrar)
|
|
||||||
* 4. DNS TXT: _patches.htdwnldsan.top via DoH (different protocol layer)
|
|
||||||
* 5. Disk cache (survives restarts, never expires)
|
|
||||||
* 6. Hardcoded fallback URL (last resort)
|
|
||||||
*/
|
|
||||||
async function getPatchesBaseUrl() {
|
|
||||||
const now = Date.now();
|
const now = Date.now();
|
||||||
|
|
||||||
// 1. Memory cache
|
if (apiCache && (now - apiCacheTime) < API_CACHE_DURATION) {
|
||||||
if (patchesBaseUrl && (now - patchesConfigTime) < PATCHES_CONFIG_CACHE_DURATION) {
|
console.log('[NewAPI] Using cached API data');
|
||||||
return patchesBaseUrl;
|
return apiCache;
|
||||||
}
|
}
|
||||||
|
|
||||||
// 2-4. Try all sources: HTTP endpoints first, then DoH
|
|
||||||
for (const source of PATCHES_CONFIG_SOURCES) {
|
|
||||||
try {
|
try {
|
||||||
let url;
|
console.log('[NewAPI] Fetching from:', NEW_API_URL);
|
||||||
if (source.type === 'http') {
|
const response = await axios.get(NEW_API_URL, {
|
||||||
console.log(`[Mirror] Trying ${source.name}: ${source.url}`);
|
|
||||||
url = await fetchFromHttp(source.url);
|
|
||||||
} else if (source.type === 'doh') {
|
|
||||||
console.log(`[Mirror] Trying ${source.name_label}: ${source.name}`);
|
|
||||||
url = await fetchFromDoh(source.name);
|
|
||||||
}
|
|
||||||
if (url) {
|
|
||||||
patchesBaseUrl = url;
|
|
||||||
patchesConfigTime = now;
|
|
||||||
saveDiskCache(url);
|
|
||||||
console.log(`[Mirror] Patches URL (via ${source.name || source.name_label}): ${url}`);
|
|
||||||
return url;
|
|
||||||
}
|
|
||||||
} catch (e) {
|
|
||||||
console.warn(`[Mirror] ${source.name || source.name_label} failed: ${e.message}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// 5. Stale memory cache (any age)
|
|
||||||
if (patchesBaseUrl) {
|
|
||||||
console.log('[Mirror] All sources failed, using stale memory cache:', patchesBaseUrl);
|
|
||||||
return patchesBaseUrl;
|
|
||||||
}
|
|
||||||
|
|
||||||
// 6. Disk cache (survives restarts)
|
|
||||||
const diskUrl = loadDiskCache();
|
|
||||||
if (diskUrl) {
|
|
||||||
patchesBaseUrl = diskUrl;
|
|
||||||
console.log('[Mirror] All sources failed, using disk cache:', diskUrl);
|
|
||||||
return diskUrl;
|
|
||||||
}
|
|
||||||
|
|
||||||
// 7. Hardcoded fallback
|
|
||||||
console.warn('[Mirror] All sources + caches exhausted, using hardcoded fallback:', HARDCODED_FALLBACK);
|
|
||||||
patchesBaseUrl = HARDCODED_FALLBACK;
|
|
||||||
return HARDCODED_FALLBACK;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get all available mirror base URLs (primary + non-Cloudflare fallbacks)
|
|
||||||
* Used by download logic to retry on different mirrors when primary is blocked
|
|
||||||
*/
|
|
||||||
async function getAllMirrorUrls() {
|
|
||||||
const primary = await getPatchesBaseUrl();
|
|
||||||
// Deduplicate: don't include mirrors that match primary
|
|
||||||
const mirrors = NON_CF_MIRRORS.filter(m => m !== primary);
|
|
||||||
return [primary, ...mirrors];
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Fetch the mirror manifest — tries primary URL first, then non-Cloudflare mirrors
|
|
||||||
*/
|
|
||||||
async function fetchMirrorManifest() {
|
|
||||||
const now = Date.now();
|
|
||||||
|
|
||||||
if (manifestCache && (now - manifestCacheTime) < MANIFEST_CACHE_DURATION) {
|
|
||||||
console.log('[Mirror] Using cached manifest');
|
|
||||||
return manifestCache;
|
|
||||||
}
|
|
||||||
|
|
||||||
const mirrors = await getAllMirrorUrls();
|
|
||||||
|
|
||||||
for (let i = 0; i < mirrors.length; i++) {
|
|
||||||
const baseUrl = mirrors[i];
|
|
||||||
const manifestUrl = `${baseUrl}/manifest.json`;
|
|
||||||
try {
|
|
||||||
console.log(`[Mirror] Fetching manifest from: ${manifestUrl}`);
|
|
||||||
const response = await axios.get(manifestUrl, {
|
|
||||||
timeout: 15000,
|
timeout: 15000,
|
||||||
maxRedirects: 5,
|
headers: {
|
||||||
headers: { 'User-Agent': 'Hytale-F2P-Launcher' }
|
'User-Agent': 'Hytale-F2P-Launcher'
|
||||||
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
if (response.data && response.data.files) {
|
if (response.data && response.data.hytale) {
|
||||||
manifestCache = response.data;
|
apiCache = response.data;
|
||||||
manifestCacheTime = now;
|
apiCacheTime = now;
|
||||||
// If a non-primary mirror worked, switch to it for downloads too
|
console.log('[NewAPI] API data fetched and cached successfully');
|
||||||
if (i > 0) {
|
|
||||||
console.log(`[Mirror] Primary unreachable, switching to mirror: ${baseUrl}`);
|
|
||||||
patchesBaseUrl = baseUrl;
|
|
||||||
patchesConfigTime = now;
|
|
||||||
saveDiskCache(baseUrl);
|
|
||||||
}
|
|
||||||
console.log('[Mirror] Manifest fetched successfully');
|
|
||||||
return response.data;
|
return response.data;
|
||||||
|
} else {
|
||||||
|
throw new Error('Invalid API response structure');
|
||||||
}
|
}
|
||||||
throw new Error('Invalid manifest structure');
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const isTimeout = error.code === 'ETIMEDOUT' || error.code === 'ECONNABORTED' || error.message.includes('timeout');
|
console.error('[NewAPI] Error fetching API:', error.message);
|
||||||
console.error(`[Mirror] Error fetching manifest from ${baseUrl}: ${error.message}${isTimeout ? ' (Cloudflare may be blocked)' : ''}`);
|
if (apiCache) {
|
||||||
if (i < mirrors.length - 1) {
|
console.log('[NewAPI] Using expired cache due to error');
|
||||||
console.log(`[Mirror] Trying next mirror...`);
|
return apiCache;
|
||||||
}
|
}
|
||||||
|
throw error;
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
// All mirrors failed — use cached manifest if available
|
|
||||||
if (manifestCache) {
|
|
||||||
console.log('[Mirror] All mirrors failed, using expired cache');
|
|
||||||
return manifestCache;
|
|
||||||
}
|
|
||||||
throw new Error('All mirrors failed and no cached manifest available');
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
async function getLatestVersionFromNewAPI(branch = 'release') {
|
||||||
* Parse manifest to get available patches for current platform
|
try {
|
||||||
* Returns array of { from, to, key, size }
|
const apiData = await fetchNewAPI();
|
||||||
*/
|
const osName = getOS();
|
||||||
function getPlatformPatches(manifest, branch = 'release') {
|
|
||||||
const os = getOS();
|
|
||||||
const arch = getArch();
|
const arch = getArch();
|
||||||
const prefix = `${os}/${arch}/${branch}/`;
|
|
||||||
const patches = [];
|
|
||||||
|
|
||||||
for (const [key, info] of Object.entries(manifest.files)) {
|
let osKey = osName;
|
||||||
if (key.startsWith(prefix) && key.endsWith('.pwr')) {
|
if (osName === 'darwin') {
|
||||||
const filename = key.slice(prefix.length, -4); // e.g., "0_to_11"
|
osKey = 'mac';
|
||||||
const match = filename.match(/^(\d+)_to_(\d+)$/);
|
}
|
||||||
if (match) {
|
|
||||||
patches.push({
|
const branchData = apiData.hytale[branch];
|
||||||
from: parseInt(match[1]),
|
if (!branchData || !branchData[osKey]) {
|
||||||
to: parseInt(match[2]),
|
throw new Error(`No data found for branch: ${branch}, OS: ${osKey}`);
|
||||||
key,
|
}
|
||||||
size: info.size
|
|
||||||
|
const osData = branchData[osKey];
|
||||||
|
|
||||||
|
const versions = Object.keys(osData).filter(key => key.endsWith('.pwr'));
|
||||||
|
|
||||||
|
if (versions.length === 0) {
|
||||||
|
throw new Error(`No .pwr files found for ${osKey}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const versionNumbers = versions.map(v => {
|
||||||
|
const match = v.match(/v(\d+)/);
|
||||||
|
return match ? parseInt(match[1]) : 0;
|
||||||
});
|
});
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return patches;
|
const latestVersionNumber = Math.max(...versionNumbers);
|
||||||
|
console.log(`[NewAPI] Latest version number: ${latestVersionNumber} for branch ${branch}`);
|
||||||
|
|
||||||
|
return `v${latestVersionNumber}`;
|
||||||
|
} catch (error) {
|
||||||
|
console.error('[NewAPI] Error getting latest version:', error.message);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
async function getPWRUrlFromNewAPI(branch = 'release', version = 'v8') {
|
||||||
* Find optimal patch path using BFS with download size minimization
|
try {
|
||||||
* Returns array of { from, to, url, size, key } steps, or null if no path found
|
const apiData = await fetchNewAPI();
|
||||||
*/
|
const osName = getOS();
|
||||||
async function findOptimalPatchPath(currentBuild, targetBuild, patches) {
|
const arch = getArch();
|
||||||
if (currentBuild >= targetBuild) return [];
|
|
||||||
|
|
||||||
const baseUrl = await getPatchesBaseUrl();
|
let osKey = osName;
|
||||||
const edges = {};
|
if (osName === 'darwin') {
|
||||||
for (const patch of patches) {
|
osKey = 'mac';
|
||||||
if (!edges[patch.from]) edges[patch.from] = [];
|
|
||||||
edges[patch.from].push(patch);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const queue = [{ build: currentBuild, path: [], totalSize: 0 }];
|
let fileName;
|
||||||
let bestPath = null;
|
if (osName === 'windows') {
|
||||||
let bestSize = Infinity;
|
fileName = `${version}-windows-amd64.pwr`;
|
||||||
|
} else if (osName === 'linux') {
|
||||||
while (queue.length > 0) {
|
fileName = `${version}-linux-amd64.pwr`;
|
||||||
const { build, path, totalSize } = queue.shift();
|
} else if (osName === 'darwin') {
|
||||||
|
fileName = `${version}-darwin-arm64.pwr`;
|
||||||
if (build === targetBuild) {
|
|
||||||
if (totalSize < bestSize) {
|
|
||||||
bestPath = path;
|
|
||||||
bestSize = totalSize;
|
|
||||||
}
|
|
||||||
continue;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (totalSize >= bestSize) continue;
|
const branchData = apiData.hytale[branch];
|
||||||
|
if (!branchData || !branchData[osKey]) {
|
||||||
const nextEdges = edges[build] || [];
|
throw new Error(`No data found for branch: ${branch}, OS: ${osKey}`);
|
||||||
for (const edge of nextEdges) {
|
|
||||||
if (edge.to <= build || edge.to > targetBuild) continue;
|
|
||||||
if (path.some(p => p.to === edge.to)) continue;
|
|
||||||
|
|
||||||
queue.push({
|
|
||||||
build: edge.to,
|
|
||||||
path: [...path, {
|
|
||||||
from: edge.from,
|
|
||||||
to: edge.to,
|
|
||||||
url: `${baseUrl}/${edge.key}`,
|
|
||||||
size: edge.size,
|
|
||||||
key: edge.key
|
|
||||||
}],
|
|
||||||
totalSize: totalSize + edge.size
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return bestPath;
|
const osData = branchData[osKey];
|
||||||
}
|
const url = osData[fileName];
|
||||||
|
|
||||||
/**
|
if (!url) {
|
||||||
* Get the optimal update plan from currentBuild to targetBuild
|
throw new Error(`No URL found for ${fileName}`);
|
||||||
* Returns { steps: [{from, to, url, size}], totalSize, isFullInstall }
|
|
||||||
*/
|
|
||||||
async function getUpdatePlan(currentBuild, targetBuild, branch = 'release') {
|
|
||||||
const manifest = await fetchMirrorManifest();
|
|
||||||
const patches = getPlatformPatches(manifest, branch);
|
|
||||||
|
|
||||||
// Try optimal path
|
|
||||||
const steps = await findOptimalPatchPath(currentBuild, targetBuild, patches);
|
|
||||||
|
|
||||||
if (steps && steps.length > 0) {
|
|
||||||
const totalSize = steps.reduce((sum, s) => sum + s.size, 0);
|
|
||||||
console.log(`[Mirror] Update plan: ${steps.map(s => `${s.from}\u2192${s.to}`).join(' + ')} (${(totalSize / 1024 / 1024).toFixed(0)} MB)`);
|
|
||||||
return { steps, totalSize, isFullInstall: steps.length === 1 && steps[0].from === 0 };
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Fallback: full install 0 -> target
|
console.log(`[NewAPI] URL for ${fileName}: ${url}`);
|
||||||
const fullPatch = patches.find(p => p.from === 0 && p.to === targetBuild);
|
return url;
|
||||||
if (fullPatch) {
|
} catch (error) {
|
||||||
const baseUrl = await getPatchesBaseUrl();
|
console.error('[NewAPI] Error getting PWR URL:', error.message);
|
||||||
const step = {
|
throw error;
|
||||||
from: 0,
|
|
||||||
to: targetBuild,
|
|
||||||
url: `${baseUrl}/${fullPatch.key}`,
|
|
||||||
size: fullPatch.size,
|
|
||||||
key: fullPatch.key
|
|
||||||
};
|
|
||||||
console.log(`[Mirror] Full install: 0\u2192${targetBuild} (${(fullPatch.size / 1024 / 1024).toFixed(0)} MB)`);
|
|
||||||
return { steps: [step], totalSize: fullPatch.size, isFullInstall: true };
|
|
||||||
}
|
}
|
||||||
|
|
||||||
throw new Error(`No patch path found from build ${currentBuild} to ${targetBuild} for ${getOS()}/${getArch()}`);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async function getLatestClientVersion(branch = 'release') {
|
async function getLatestClientVersion(branch = 'release') {
|
||||||
try {
|
try {
|
||||||
console.log(`[Mirror] Fetching latest client version (branch: ${branch})...`);
|
console.log(`[NewAPI] Fetching latest client version from new API (branch: ${branch})...`);
|
||||||
const manifest = await fetchMirrorManifest();
|
|
||||||
const patches = getPlatformPatches(manifest, branch);
|
|
||||||
|
|
||||||
if (patches.length === 0) {
|
// Utiliser la nouvelle API
|
||||||
console.log(`[Mirror] No patches for branch '${branch}', using fallback`);
|
const latestVersion = await getLatestVersionFromNewAPI(branch);
|
||||||
return `v${FALLBACK_LATEST_BUILD}`;
|
console.log(`[NewAPI] Latest client version for ${branch}: ${latestVersion}`);
|
||||||
}
|
return latestVersion;
|
||||||
|
|
||||||
const latestBuild = Math.max(...patches.map(p => p.to));
|
|
||||||
console.log(`[Mirror] Latest client version: v${latestBuild}`);
|
|
||||||
return `v${latestBuild}`;
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('[Mirror] Error:', error.message);
|
console.error('[NewAPI] Error fetching client version from new API:', error.message);
|
||||||
return `v${FALLBACK_LATEST_BUILD}`;
|
console.log('[NewAPI] Falling back to old API...');
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get PWR download URL for fresh install (0 -> target)
|
|
||||||
* Backward-compatible with old getPWRUrlFromNewAPI signature
|
|
||||||
* Checks mirror first, then constructs URL for the branch
|
|
||||||
*/
|
|
||||||
async function getPWRUrl(branch = 'release', version = 'v11') {
|
|
||||||
const targetBuild = extractVersionNumber(version);
|
|
||||||
const os = getOS();
|
|
||||||
const arch = getArch();
|
|
||||||
|
|
||||||
|
// Fallback vers l'ancienne API si la nouvelle échoue
|
||||||
try {
|
try {
|
||||||
const manifest = await fetchMirrorManifest();
|
const response = await smartRequest(`https://files.hytalef2p.com/api/version_client?branch=${branch}`, {
|
||||||
const patches = getPlatformPatches(manifest, branch);
|
timeout: 40000,
|
||||||
const fullPatch = patches.find(p => p.from === 0 && p.to === targetBuild);
|
headers: {
|
||||||
|
'User-Agent': 'Hytale-F2P-Launcher'
|
||||||
if (fullPatch) {
|
|
||||||
const baseUrl = await getPatchesBaseUrl();
|
|
||||||
const url = `${baseUrl}/${fullPatch.key}`;
|
|
||||||
console.log(`[Mirror] PWR URL: ${url}`);
|
|
||||||
return url;
|
|
||||||
}
|
}
|
||||||
|
});
|
||||||
|
|
||||||
if (patches.length > 0) {
|
if (response.data && response.data.client_version) {
|
||||||
// Branch exists in mirror but no full patch for this target - construct URL
|
const version = response.data.client_version;
|
||||||
console.log(`[Mirror] No 0->${targetBuild} patch found, constructing URL`);
|
console.log(`Latest client version for ${branch} (old API): ${version}`);
|
||||||
|
return version;
|
||||||
} else {
|
} else {
|
||||||
console.log(`[Mirror] Branch '${branch}' not in mirror, constructing URL`);
|
console.log('Warning: Invalid API response, falling back to latest known version (v8)');
|
||||||
|
return 'v8';
|
||||||
|
}
|
||||||
|
} catch (fallbackError) {
|
||||||
|
console.error('Error fetching client version from old API:', fallbackError.message);
|
||||||
|
console.log('Warning: Both APIs unavailable, falling back to latest known version (v8)');
|
||||||
|
return 'v8';
|
||||||
}
|
}
|
||||||
} catch (error) {
|
|
||||||
console.error('[Mirror] Error getting PWR URL:', error.message);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Construct mirror URL (will work if patch was uploaded but manifest is stale)
|
|
||||||
const baseUrl = await getPatchesBaseUrl();
|
|
||||||
return `${baseUrl}/${os}/${arch}/${branch}/0_to_${targetBuild}.pwr`;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Backward-compatible alias
|
// Fonction utilitaire pour extraire le numéro de version
|
||||||
const getPWRUrlFromNewAPI = getPWRUrl;
|
// Supporte les formats: "7.pwr", "v8", "v8-windows-amd64.pwr", etc.
|
||||||
|
|
||||||
// Utility function to extract version number
|
|
||||||
// Supports: "7.pwr", "v8", "v8-windows-amd64.pwr", "5_to_10", etc.
|
|
||||||
function extractVersionNumber(version) {
|
function extractVersionNumber(version) {
|
||||||
if (!version) return 0;
|
if (!version) return 0;
|
||||||
|
|
||||||
// New format: "v8" or "v8-xxx.pwr"
|
// Nouveau format: "v8" ou "v8-xxx.pwr"
|
||||||
const vMatch = version.match(/v(\d+)/);
|
const vMatch = version.match(/v(\d+)/);
|
||||||
if (vMatch) return parseInt(vMatch[1]);
|
if (vMatch) {
|
||||||
|
return parseInt(vMatch[1]);
|
||||||
|
}
|
||||||
|
|
||||||
// Old format: "7.pwr"
|
// Ancien format: "7.pwr"
|
||||||
const pwrMatch = version.match(/(\d+)\.pwr/);
|
const pwrMatch = version.match(/(\d+)\.pwr/);
|
||||||
if (pwrMatch) return parseInt(pwrMatch[1]);
|
if (pwrMatch) {
|
||||||
|
return parseInt(pwrMatch[1]);
|
||||||
|
}
|
||||||
|
|
||||||
// Fallback
|
// Fallback: essayer de parser directement
|
||||||
const num = parseInt(version);
|
const num = parseInt(version);
|
||||||
return isNaN(num) ? 0 : num;
|
return isNaN(num) ? 0 : num;
|
||||||
}
|
}
|
||||||
|
|
||||||
async function buildArchiveUrl(buildNumber, branch = 'release') {
|
function buildArchiveUrl(buildNumber, branch = 'release') {
|
||||||
const baseUrl = await getPatchesBaseUrl();
|
|
||||||
const os = getOS();
|
const os = getOS();
|
||||||
const arch = getArch();
|
const arch = getArch();
|
||||||
return `${baseUrl}/${os}/${arch}/${branch}/0_to_${buildNumber}.pwr`;
|
return `${BASE_PATCH_URL}/${os}/${arch}/${branch}/0/${buildNumber}.pwr`;
|
||||||
}
|
}
|
||||||
|
|
||||||
async function checkArchiveExists(buildNumber, branch = 'release') {
|
async function checkArchiveExists(buildNumber, branch = 'release') {
|
||||||
const url = await buildArchiveUrl(buildNumber, branch);
|
const url = buildArchiveUrl(buildNumber, branch);
|
||||||
try {
|
try {
|
||||||
const response = await axios.head(url, { timeout: 10000 });
|
const response = await axios.head(url, { timeout: 10000 });
|
||||||
return response.status === 200;
|
return response.status === 200;
|
||||||
} catch {
|
} catch (error) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async function discoverAvailableVersions(latestKnown, branch = 'release') {
|
async function discoverAvailableVersions(latestKnown, branch = 'release', maxProbe = 50) {
|
||||||
|
const available = [];
|
||||||
|
const latest = extractVersionNumber(latestKnown);
|
||||||
|
|
||||||
|
for (let i = latest; i >= Math.max(1, latest - maxProbe); i--) {
|
||||||
|
const exists = await checkArchiveExists(i, branch);
|
||||||
|
if (exists) {
|
||||||
|
available.push(`${i}.pwr`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return available;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function fetchPatchManifest(branch = 'release') {
|
||||||
try {
|
try {
|
||||||
const manifest = await fetchMirrorManifest();
|
const os = getOS();
|
||||||
const patches = getPlatformPatches(manifest, branch);
|
const arch = getArch();
|
||||||
const versions = [...new Set(patches.map(p => p.to))].sort((a, b) => b - a);
|
const response = await smartRequest(`${MANIFEST_API}?branch=${branch}&os=${os}&arch=${arch}`, {
|
||||||
return versions.map(v => `${v}.pwr`);
|
timeout: 10000
|
||||||
} catch {
|
});
|
||||||
return [];
|
return response.data.patches || {};
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Failed to fetch patch manifest:', error.message);
|
||||||
|
return {};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async function extractVersionDetails(targetVersion, branch = 'release') {
|
async function extractVersionDetails(targetVersion, branch = 'release') {
|
||||||
const buildNumber = extractVersionNumber(targetVersion);
|
const buildNumber = extractVersionNumber(targetVersion);
|
||||||
const fullUrl = await buildArchiveUrl(buildNumber, branch);
|
const previousBuild = buildNumber - 1;
|
||||||
|
|
||||||
|
const manifest = await fetchPatchManifest(branch);
|
||||||
|
const patchInfo = manifest[buildNumber];
|
||||||
|
|
||||||
return {
|
return {
|
||||||
version: targetVersion,
|
version: targetVersion,
|
||||||
buildNumber,
|
buildNumber: buildNumber,
|
||||||
buildName: `HYTALE-Build-${buildNumber}`,
|
buildName: `HYTALE-Build-${buildNumber}`,
|
||||||
fullUrl,
|
fullUrl: patchInfo?.original_url || buildArchiveUrl(buildNumber, branch),
|
||||||
differentialUrl: null,
|
differentialUrl: patchInfo?.patch_url || null,
|
||||||
checksum: null,
|
checksum: patchInfo?.patch_hash || null,
|
||||||
sourceVersion: null,
|
sourceVersion: patchInfo?.from ? `${patchInfo.from}.pwr` : (previousBuild > 0 ? `${previousBuild}.pwr` : null),
|
||||||
isDifferential: false,
|
isDifferential: !!patchInfo?.proper_patch,
|
||||||
releaseNotes: null
|
releaseNotes: patchInfo?.patch_note || null
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
function canUseDifferentialUpdate() {
|
function canUseDifferentialUpdate(currentVersion, targetDetails) {
|
||||||
// Differential updates are now handled via getUpdatePlan()
|
if (!targetDetails) return false;
|
||||||
return false;
|
if (!targetDetails.differentialUrl) return false;
|
||||||
|
if (!targetDetails.isDifferential) return false;
|
||||||
|
|
||||||
|
if (!currentVersion) return false;
|
||||||
|
|
||||||
|
const currentBuild = extractVersionNumber(currentVersion);
|
||||||
|
const expectedSource = extractVersionNumber(targetDetails.sourceVersion);
|
||||||
|
|
||||||
|
return currentBuild === expectedSource;
|
||||||
}
|
}
|
||||||
|
|
||||||
function needsIntermediatePatches(currentVersion, targetVersion) {
|
function needsIntermediatePatches(currentVersion, targetVersion) {
|
||||||
if (!currentVersion) return [];
|
if (!currentVersion) return [];
|
||||||
|
|
||||||
const current = extractVersionNumber(currentVersion);
|
const current = extractVersionNumber(currentVersion);
|
||||||
const target = extractVersionNumber(targetVersion);
|
const target = extractVersionNumber(targetVersion);
|
||||||
if (current >= target) return [];
|
|
||||||
return [targetVersion];
|
const intermediates = [];
|
||||||
|
for (let i = current + 1; i <= target; i++) {
|
||||||
|
intermediates.push(`${i}.pwr`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return intermediates;
|
||||||
}
|
}
|
||||||
|
|
||||||
async function computeFileChecksum(filePath) {
|
async function computeFileChecksum(filePath) {
|
||||||
return new Promise((resolve, reject) => {
|
return new Promise((resolve, reject) => {
|
||||||
const hash = crypto.createHash('sha256');
|
const hash = crypto.createHash('sha256');
|
||||||
const stream = fs.createReadStream(filePath);
|
const stream = fs.createReadStream(filePath);
|
||||||
|
|
||||||
stream.on('data', data => hash.update(data));
|
stream.on('data', data => hash.update(data));
|
||||||
stream.on('end', () => resolve(hash.digest('hex')));
|
stream.on('end', () => resolve(hash.digest('hex')));
|
||||||
stream.on('error', reject);
|
stream.on('error', reject);
|
||||||
@@ -503,6 +290,7 @@ async function computeFileChecksum(filePath) {
|
|||||||
|
|
||||||
async function validateChecksum(filePath, expectedChecksum) {
|
async function validateChecksum(filePath, expectedChecksum) {
|
||||||
if (!expectedChecksum) return true;
|
if (!expectedChecksum) return true;
|
||||||
|
|
||||||
const actualChecksum = await computeFileChecksum(filePath);
|
const actualChecksum = await computeFileChecksum(filePath);
|
||||||
return actualChecksum === expectedChecksum;
|
return actualChecksum === expectedChecksum;
|
||||||
}
|
}
|
||||||
@@ -511,7 +299,7 @@ function getInstalledClientVersion() {
|
|||||||
try {
|
try {
|
||||||
const { loadVersionClient } = require('../core/config');
|
const { loadVersionClient } = require('../core/config');
|
||||||
return loadVersionClient();
|
return loadVersionClient();
|
||||||
} catch {
|
} catch (err) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -527,13 +315,8 @@ module.exports = {
|
|||||||
computeFileChecksum,
|
computeFileChecksum,
|
||||||
validateChecksum,
|
validateChecksum,
|
||||||
getInstalledClientVersion,
|
getInstalledClientVersion,
|
||||||
fetchMirrorManifest,
|
fetchNewAPI,
|
||||||
getPWRUrl,
|
getLatestVersionFromNewAPI,
|
||||||
getPWRUrlFromNewAPI,
|
getPWRUrlFromNewAPI,
|
||||||
getUpdatePlan,
|
extractVersionNumber
|
||||||
extractVersionNumber,
|
|
||||||
getPlatformPatches,
|
|
||||||
findOptimalPatchPath,
|
|
||||||
getPatchesBaseUrl,
|
|
||||||
getAllMirrorUrls
|
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -9,9 +9,7 @@ const MAX_DOMAIN_LENGTH = 16;
|
|||||||
|
|
||||||
// DualAuth ByteBuddy Agent (runtime class transformation, no JAR modification)
|
// DualAuth ByteBuddy Agent (runtime class transformation, no JAR modification)
|
||||||
const DUALAUTH_AGENT_URL = 'https://github.com/sanasol/hytale-auth-server/releases/latest/download/dualauth-agent.jar';
|
const DUALAUTH_AGENT_URL = 'https://github.com/sanasol/hytale-auth-server/releases/latest/download/dualauth-agent.jar';
|
||||||
const DUALAUTH_AGENT_VERSION_API = 'https://api.github.com/repos/sanasol/hytale-auth-server/releases/latest';
|
|
||||||
const DUALAUTH_AGENT_FILENAME = 'dualauth-agent.jar';
|
const DUALAUTH_AGENT_FILENAME = 'dualauth-agent.jar';
|
||||||
const DUALAUTH_AGENT_VERSION_FILE = 'dualauth-agent.version';
|
|
||||||
|
|
||||||
function getTargetDomain() {
|
function getTargetDomain() {
|
||||||
if (process.env.HYTALE_AUTH_DOMAIN) {
|
if (process.env.HYTALE_AUTH_DOMAIN) {
|
||||||
@@ -513,70 +511,30 @@ class ClientPatcher {
|
|||||||
*/
|
*/
|
||||||
async ensureAgentAvailable(serverDir, progressCallback) {
|
async ensureAgentAvailable(serverDir, progressCallback) {
|
||||||
const agentPath = this.getAgentPath(serverDir);
|
const agentPath = this.getAgentPath(serverDir);
|
||||||
const versionPath = path.join(serverDir, DUALAUTH_AGENT_VERSION_FILE);
|
|
||||||
|
|
||||||
console.log('=== DualAuth Agent (ByteBuddy) ===');
|
console.log('=== DualAuth Agent (ByteBuddy) ===');
|
||||||
console.log(`Target: ${agentPath}`);
|
console.log(`Target: ${agentPath}`);
|
||||||
|
|
||||||
// Check local version and whether file exists
|
// Check if agent already exists and is valid
|
||||||
let localVersion = null;
|
|
||||||
let agentExists = false;
|
|
||||||
if (fs.existsSync(agentPath)) {
|
if (fs.existsSync(agentPath)) {
|
||||||
try {
|
try {
|
||||||
const stats = fs.statSync(agentPath);
|
const stats = fs.statSync(agentPath);
|
||||||
if (stats.size > 1024) {
|
if (stats.size > 1024) {
|
||||||
agentExists = true;
|
console.log(`DualAuth Agent present (${(stats.size / 1024).toFixed(0)} KB)`);
|
||||||
if (fs.existsSync(versionPath)) {
|
if (progressCallback) progressCallback('DualAuth Agent ready', 100);
|
||||||
localVersion = fs.readFileSync(versionPath, 'utf8').trim();
|
return { success: true, agentPath, alreadyExists: true };
|
||||||
}
|
}
|
||||||
} else {
|
// File exists but too small - corrupt, re-download
|
||||||
console.log('Agent file appears corrupt, re-downloading...');
|
console.log('Agent file appears corrupt, re-downloading...');
|
||||||
fs.unlinkSync(agentPath);
|
fs.unlinkSync(agentPath);
|
||||||
}
|
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
console.warn('Could not check agent file:', e.message);
|
console.warn('Could not check agent file:', e.message);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check for updates from GitHub
|
|
||||||
let remoteVersion = null;
|
|
||||||
let needsDownload = !agentExists;
|
|
||||||
if (agentExists) {
|
|
||||||
try {
|
|
||||||
if (progressCallback) progressCallback('Checking for agent updates...', 5);
|
|
||||||
const axios = require('axios');
|
|
||||||
const resp = await axios.get(DUALAUTH_AGENT_VERSION_API, {
|
|
||||||
timeout: 5000,
|
|
||||||
headers: { 'Accept': 'application/vnd.github.v3+json' }
|
|
||||||
});
|
|
||||||
remoteVersion = resp.data.tag_name; // e.g. "v1.1.10"
|
|
||||||
if (localVersion && localVersion === remoteVersion) {
|
|
||||||
console.log(`DualAuth Agent up to date (${localVersion})`);
|
|
||||||
if (progressCallback) progressCallback('DualAuth Agent ready', 100);
|
|
||||||
return { success: true, agentPath, alreadyExists: true, version: localVersion };
|
|
||||||
}
|
|
||||||
console.log(`Agent update available: ${localVersion || 'unknown'} → ${remoteVersion}`);
|
|
||||||
needsDownload = true;
|
|
||||||
} catch (e) {
|
|
||||||
// GitHub API failed - use existing agent if available
|
|
||||||
console.warn(`Could not check for updates: ${e.message}`);
|
|
||||||
if (agentExists) {
|
|
||||||
console.log(`Using existing agent (${localVersion || 'unknown version'})`);
|
|
||||||
if (progressCallback) progressCallback('DualAuth Agent ready', 100);
|
|
||||||
return { success: true, agentPath, alreadyExists: true, version: localVersion };
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!needsDownload) {
|
|
||||||
if (progressCallback) progressCallback('DualAuth Agent ready', 100);
|
|
||||||
return { success: true, agentPath, alreadyExists: true, version: localVersion };
|
|
||||||
}
|
|
||||||
|
|
||||||
// Download agent from GitHub releases
|
// Download agent from GitHub releases
|
||||||
const action = agentExists ? 'Updating' : 'Downloading';
|
if (progressCallback) progressCallback('Downloading DualAuth Agent...', 20);
|
||||||
if (progressCallback) progressCallback(`${action} DualAuth Agent...`, 20);
|
console.log(`Downloading from: ${DUALAUTH_AGENT_URL}`);
|
||||||
console.log(`${action} from: ${DUALAUTH_AGENT_URL}`);
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Ensure server directory exists
|
// Ensure server directory exists
|
||||||
@@ -590,7 +548,7 @@ class ClientPatcher {
|
|||||||
const stream = await smartDownloadStream(DUALAUTH_AGENT_URL, (chunk, downloadedBytes, total) => {
|
const stream = await smartDownloadStream(DUALAUTH_AGENT_URL, (chunk, downloadedBytes, total) => {
|
||||||
if (progressCallback && total) {
|
if (progressCallback && total) {
|
||||||
const percent = 20 + Math.floor((downloadedBytes / total) * 70);
|
const percent = 20 + Math.floor((downloadedBytes / total) * 70);
|
||||||
progressCallback(`${action} agent... ${(downloadedBytes / 1024).toFixed(0)} KB`, percent);
|
progressCallback(`Downloading agent... ${(downloadedBytes / 1024).toFixed(0)} KB`, percent);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -617,13 +575,9 @@ class ClientPatcher {
|
|||||||
}
|
}
|
||||||
fs.renameSync(tmpPath, agentPath);
|
fs.renameSync(tmpPath, agentPath);
|
||||||
|
|
||||||
// Save version
|
console.log(`DualAuth Agent downloaded (${(stats.size / 1024).toFixed(0)} KB)`);
|
||||||
const version = remoteVersion || 'unknown';
|
|
||||||
fs.writeFileSync(versionPath, version, 'utf8');
|
|
||||||
|
|
||||||
console.log(`DualAuth Agent ${agentExists ? 'updated' : 'downloaded'} (${(stats.size / 1024).toFixed(0)} KB, ${version})`);
|
|
||||||
if (progressCallback) progressCallback('DualAuth Agent ready', 100);
|
if (progressCallback) progressCallback('DualAuth Agent ready', 100);
|
||||||
return { success: true, agentPath, updated: agentExists, version };
|
return { success: true, agentPath };
|
||||||
|
|
||||||
} catch (downloadError) {
|
} catch (downloadError) {
|
||||||
console.error(`Failed to download DualAuth Agent: ${downloadError.message}`);
|
console.error(`Failed to download DualAuth Agent: ${downloadError.message}`);
|
||||||
@@ -632,11 +586,6 @@ class ClientPatcher {
|
|||||||
if (fs.existsSync(tmpPath)) {
|
if (fs.existsSync(tmpPath)) {
|
||||||
try { fs.unlinkSync(tmpPath); } catch (e) { /* ignore */ }
|
try { fs.unlinkSync(tmpPath); } catch (e) { /* ignore */ }
|
||||||
}
|
}
|
||||||
// If we had an existing agent, still use it
|
|
||||||
if (agentExists) {
|
|
||||||
console.log('Using existing agent despite update failure');
|
|
||||||
return { success: true, agentPath, alreadyExists: true, version: localVersion };
|
|
||||||
}
|
|
||||||
return { success: false, error: downloadError.message };
|
return { success: false, error: downloadError.message };
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,2 +1,3 @@
|
|||||||
provider: generic
|
provider: github
|
||||||
url: https://git.sanhost.net/sanasol/hytale-f2p/releases/download/latest
|
owner: amiayweb # Change to your own GitHub username
|
||||||
|
repo: Hytale-F2P
|
||||||
|
|||||||
10
main.js
10
main.js
@@ -84,12 +84,12 @@ function setDiscordActivity() {
|
|||||||
largeImageText: 'Hytale F2P Launcher',
|
largeImageText: 'Hytale F2P Launcher',
|
||||||
buttons: [
|
buttons: [
|
||||||
{
|
{
|
||||||
label: 'Download',
|
label: 'GitHub',
|
||||||
url: 'https://git.sanhost.net/sanasol/hytale-f2p/releases'
|
url: 'https://github.com/amiayweb/Hytale-F2P'
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
label: 'Discord',
|
label: 'Discord',
|
||||||
url: 'https://discord.gg/Fhbb9Yk5WW'
|
url: 'https://discord.gg/hf2pdc'
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
});
|
});
|
||||||
@@ -964,8 +964,8 @@ ipcMain.handle('open-external', async (event, url) => {
|
|||||||
|
|
||||||
ipcMain.handle('open-download-page', async () => {
|
ipcMain.handle('open-download-page', async () => {
|
||||||
try {
|
try {
|
||||||
// Open Forgejo releases page for manual download
|
// Open GitHub releases page for manual download
|
||||||
await shell.openExternal('https://git.sanhost.net/sanasol/hytale-f2p/releases/latest');
|
await shell.openExternal('https://github.com/amiayweb/Hytale-F2P/releases/latest');
|
||||||
return { success: true };
|
return { success: true };
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('Failed to open download page:', error);
|
console.error('Failed to open download page:', error);
|
||||||
|
|||||||
@@ -1,8 +1,8 @@
|
|||||||
{
|
{
|
||||||
"name": "hytale-f2p-launcher",
|
"name": "hytale-f2p-launcher",
|
||||||
"version": "2.3.8",
|
"version": "2.2.2",
|
||||||
"description": "A modern, cross-platform launcher for Hytale with automatic updates and multi-client support",
|
"description": "A modern, cross-platform launcher for Hytale with automatic updates and multi-client support",
|
||||||
"homepage": "https://git.sanhost.net/sanasol/hytale-f2p",
|
"homepage": "https://github.com/amiayweb/Hytale-F2P",
|
||||||
"main": "main.js",
|
"main": "main.js",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"start": "electron .",
|
"start": "electron .",
|
||||||
@@ -118,8 +118,9 @@
|
|||||||
"createStartMenuShortcut": true
|
"createStartMenuShortcut": true
|
||||||
},
|
},
|
||||||
"publish": {
|
"publish": {
|
||||||
"provider": "generic",
|
"provider": "github",
|
||||||
"url": "https://git.sanhost.net/sanasol/hytale-f2p/releases/download/latest"
|
"owner": "amiayweb",
|
||||||
|
"repo": "Hytale-F2P"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,523 +0,0 @@
|
|||||||
#!/usr/bin/env node
|
|
||||||
/**
|
|
||||||
* UUID Persistence Tests
|
|
||||||
*
|
|
||||||
* Simulates the exact conditions that caused character data loss:
|
|
||||||
* - Config file corruption during updates
|
|
||||||
* - File locks making config temporarily unreadable
|
|
||||||
* - Username re-entry after config wipe
|
|
||||||
*
|
|
||||||
* Run: node test-uuid-persistence.js
|
|
||||||
*/
|
|
||||||
|
|
||||||
const fs = require('fs');
|
|
||||||
const path = require('path');
|
|
||||||
const os = require('os');
|
|
||||||
|
|
||||||
// Use a temp directory so we don't mess with real config
|
|
||||||
const TEST_DIR = path.join(os.tmpdir(), 'hytale-uuid-test-' + Date.now());
|
|
||||||
const CONFIG_FILE = path.join(TEST_DIR, 'config.json');
|
|
||||||
const CONFIG_BACKUP = path.join(TEST_DIR, 'config.json.bak');
|
|
||||||
const CONFIG_TEMP = path.join(TEST_DIR, 'config.json.tmp');
|
|
||||||
const UUID_STORE_FILE = path.join(TEST_DIR, 'uuid-store.json');
|
|
||||||
|
|
||||||
// Track test results
|
|
||||||
let passed = 0;
|
|
||||||
let failed = 0;
|
|
||||||
const failures = [];
|
|
||||||
|
|
||||||
function assert(condition, message) {
|
|
||||||
if (condition) {
|
|
||||||
passed++;
|
|
||||||
console.log(` ✓ ${message}`);
|
|
||||||
} else {
|
|
||||||
failed++;
|
|
||||||
failures.push(message);
|
|
||||||
console.log(` ✗ FAIL: ${message}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function assertEqual(actual, expected, message) {
|
|
||||||
if (actual === expected) {
|
|
||||||
passed++;
|
|
||||||
console.log(` ✓ ${message}`);
|
|
||||||
} else {
|
|
||||||
failed++;
|
|
||||||
failures.push(`${message} (expected: ${expected}, got: ${actual})`);
|
|
||||||
console.log(` ✗ FAIL: ${message} (expected: "${expected}", got: "${actual}")`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function cleanup() {
|
|
||||||
try {
|
|
||||||
if (fs.existsSync(TEST_DIR)) {
|
|
||||||
fs.rmSync(TEST_DIR, { recursive: true });
|
|
||||||
}
|
|
||||||
} catch (e) {}
|
|
||||||
}
|
|
||||||
|
|
||||||
function setup() {
|
|
||||||
cleanup();
|
|
||||||
fs.mkdirSync(TEST_DIR, { recursive: true });
|
|
||||||
}
|
|
||||||
|
|
||||||
// ============================================================================
|
|
||||||
// Inline the config functions so we can override paths
|
|
||||||
// (We can't require config.js directly because it uses hardcoded getAppDir())
|
|
||||||
// ============================================================================
|
|
||||||
|
|
||||||
function validateConfig(config) {
|
|
||||||
if (!config || typeof config !== 'object') return false;
|
|
||||||
if (config.userUuids !== undefined && typeof config.userUuids !== 'object') return false;
|
|
||||||
if (config.username !== undefined && (typeof config.username !== 'string')) return false;
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
function loadConfig() {
|
|
||||||
try {
|
|
||||||
if (fs.existsSync(CONFIG_FILE)) {
|
|
||||||
const data = fs.readFileSync(CONFIG_FILE, 'utf8');
|
|
||||||
if (data.trim()) {
|
|
||||||
const config = JSON.parse(data);
|
|
||||||
if (validateConfig(config)) return config;
|
|
||||||
console.warn('[Config] Primary config invalid structure, trying backup...');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
console.error('[Config] Failed to load primary config:', err.message);
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
if (fs.existsSync(CONFIG_BACKUP)) {
|
|
||||||
const data = fs.readFileSync(CONFIG_BACKUP, 'utf8');
|
|
||||||
if (data.trim()) {
|
|
||||||
const config = JSON.parse(data);
|
|
||||||
if (validateConfig(config)) {
|
|
||||||
console.log('[Config] Recovered from backup successfully');
|
|
||||||
try { fs.writeFileSync(CONFIG_FILE, data, 'utf8'); } catch (e) {}
|
|
||||||
return config;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (err) {}
|
|
||||||
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
|
|
||||||
function saveConfig(update) {
|
|
||||||
const maxRetries = 3;
|
|
||||||
for (let attempt = 1; attempt <= maxRetries; attempt++) {
|
|
||||||
try {
|
|
||||||
if (!fs.existsSync(TEST_DIR)) fs.mkdirSync(TEST_DIR, { recursive: true });
|
|
||||||
|
|
||||||
const currentConfig = loadConfig();
|
|
||||||
|
|
||||||
// SAFETY CHECK: refuse to save if file exists but loaded empty
|
|
||||||
if (Object.keys(currentConfig).length === 0 && fs.existsSync(CONFIG_FILE)) {
|
|
||||||
const fileSize = fs.statSync(CONFIG_FILE).size;
|
|
||||||
if (fileSize > 2) {
|
|
||||||
console.error(`[Config] REFUSING to save — loaded empty but file exists (${fileSize} bytes). Retrying...`);
|
|
||||||
const delay = attempt * 50; // shorter delay for tests
|
|
||||||
const start = Date.now();
|
|
||||||
while (Date.now() - start < delay) {}
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const newConfig = { ...currentConfig, ...update };
|
|
||||||
const data = JSON.stringify(newConfig, null, 2);
|
|
||||||
|
|
||||||
fs.writeFileSync(CONFIG_TEMP, data, 'utf8');
|
|
||||||
const verification = JSON.parse(fs.readFileSync(CONFIG_TEMP, 'utf8'));
|
|
||||||
if (!validateConfig(verification)) throw new Error('Validation failed');
|
|
||||||
|
|
||||||
if (fs.existsSync(CONFIG_FILE)) {
|
|
||||||
try {
|
|
||||||
const currentData = fs.readFileSync(CONFIG_FILE, 'utf8');
|
|
||||||
if (currentData.trim()) fs.writeFileSync(CONFIG_BACKUP, currentData, 'utf8');
|
|
||||||
} catch (e) {}
|
|
||||||
}
|
|
||||||
|
|
||||||
fs.renameSync(CONFIG_TEMP, CONFIG_FILE);
|
|
||||||
return true;
|
|
||||||
} catch (err) {
|
|
||||||
try { if (fs.existsSync(CONFIG_TEMP)) fs.unlinkSync(CONFIG_TEMP); } catch (e) {}
|
|
||||||
if (attempt >= maxRetries) throw err;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function loadUuidStore() {
|
|
||||||
try {
|
|
||||||
if (fs.existsSync(UUID_STORE_FILE)) {
|
|
||||||
const data = fs.readFileSync(UUID_STORE_FILE, 'utf8');
|
|
||||||
if (data.trim()) return JSON.parse(data);
|
|
||||||
}
|
|
||||||
} catch (err) {}
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
|
|
||||||
function saveUuidStore(store) {
|
|
||||||
const tmpFile = UUID_STORE_FILE + '.tmp';
|
|
||||||
fs.writeFileSync(tmpFile, JSON.stringify(store, null, 2), 'utf8');
|
|
||||||
fs.renameSync(tmpFile, UUID_STORE_FILE);
|
|
||||||
}
|
|
||||||
|
|
||||||
function migrateUuidStoreIfNeeded() {
|
|
||||||
if (fs.existsSync(UUID_STORE_FILE)) return;
|
|
||||||
const config = loadConfig();
|
|
||||||
if (config.userUuids && Object.keys(config.userUuids).length > 0) {
|
|
||||||
console.log('[UUID Store] Migrating', Object.keys(config.userUuids).length, 'UUIDs');
|
|
||||||
saveUuidStore(config.userUuids);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function getUuidForUser(username) {
|
|
||||||
const { v4: uuidv4 } = require('uuid');
|
|
||||||
if (!username || !username.trim()) throw new Error('Username required');
|
|
||||||
|
|
||||||
const displayName = username.trim();
|
|
||||||
const normalizedLookup = displayName.toLowerCase();
|
|
||||||
|
|
||||||
migrateUuidStoreIfNeeded();
|
|
||||||
|
|
||||||
// 1. Check UUID store (source of truth)
|
|
||||||
const uuidStore = loadUuidStore();
|
|
||||||
const storeKey = Object.keys(uuidStore).find(k => k.toLowerCase() === normalizedLookup);
|
|
||||||
if (storeKey) {
|
|
||||||
const existingUuid = uuidStore[storeKey];
|
|
||||||
if (storeKey !== displayName) {
|
|
||||||
delete uuidStore[storeKey];
|
|
||||||
uuidStore[displayName] = existingUuid;
|
|
||||||
saveUuidStore(uuidStore);
|
|
||||||
}
|
|
||||||
// Sync to config (non-critical)
|
|
||||||
try {
|
|
||||||
const config = loadConfig();
|
|
||||||
const configUuids = config.userUuids || {};
|
|
||||||
const configKey = Object.keys(configUuids).find(k => k.toLowerCase() === normalizedLookup);
|
|
||||||
if (!configKey || configUuids[configKey] !== existingUuid) {
|
|
||||||
if (configKey) delete configUuids[configKey];
|
|
||||||
configUuids[displayName] = existingUuid;
|
|
||||||
saveConfig({ userUuids: configUuids });
|
|
||||||
}
|
|
||||||
} catch (e) {}
|
|
||||||
return existingUuid;
|
|
||||||
}
|
|
||||||
|
|
||||||
// 2. Fallback: check config.json
|
|
||||||
const config = loadConfig();
|
|
||||||
const userUuids = config.userUuids || {};
|
|
||||||
const configKey = Object.keys(userUuids).find(k => k.toLowerCase() === normalizedLookup);
|
|
||||||
if (configKey) {
|
|
||||||
const recoveredUuid = userUuids[configKey];
|
|
||||||
uuidStore[displayName] = recoveredUuid;
|
|
||||||
saveUuidStore(uuidStore);
|
|
||||||
return recoveredUuid;
|
|
||||||
}
|
|
||||||
|
|
||||||
// 3. New user — generate UUID
|
|
||||||
const newUuid = uuidv4();
|
|
||||||
uuidStore[displayName] = newUuid;
|
|
||||||
saveUuidStore(uuidStore);
|
|
||||||
userUuids[displayName] = newUuid;
|
|
||||||
saveConfig({ userUuids });
|
|
||||||
return newUuid;
|
|
||||||
}
|
|
||||||
|
|
||||||
// ============================================================================
|
|
||||||
// OLD CODE (before fix) — for comparison testing
|
|
||||||
// ============================================================================
|
|
||||||
|
|
||||||
function getUuidForUser_OLD(username) {
|
|
||||||
const { v4: uuidv4 } = require('uuid');
|
|
||||||
if (!username || !username.trim()) throw new Error('Username required');
|
|
||||||
const displayName = username.trim();
|
|
||||||
const normalizedLookup = displayName.toLowerCase();
|
|
||||||
|
|
||||||
const config = loadConfig();
|
|
||||||
const userUuids = config.userUuids || {};
|
|
||||||
const existingKey = Object.keys(userUuids).find(k => k.toLowerCase() === normalizedLookup);
|
|
||||||
|
|
||||||
if (existingKey) {
|
|
||||||
return userUuids[existingKey];
|
|
||||||
}
|
|
||||||
|
|
||||||
// New user
|
|
||||||
const newUuid = uuidv4();
|
|
||||||
userUuids[displayName] = newUuid;
|
|
||||||
saveConfig({ userUuids });
|
|
||||||
return newUuid;
|
|
||||||
}
|
|
||||||
|
|
||||||
function saveConfig_OLD(update) {
|
|
||||||
// OLD saveConfig without safety check
|
|
||||||
if (!fs.existsSync(TEST_DIR)) fs.mkdirSync(TEST_DIR, { recursive: true });
|
|
||||||
const currentConfig = loadConfig();
|
|
||||||
// NO SAFETY CHECK — this is the bug
|
|
||||||
const newConfig = { ...currentConfig, ...update };
|
|
||||||
fs.writeFileSync(CONFIG_FILE, JSON.stringify(newConfig, null, 2), 'utf8');
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
// ============================================================================
|
|
||||||
// TESTS
|
|
||||||
// ============================================================================
|
|
||||||
|
|
||||||
console.log('\n' + '='.repeat(70));
|
|
||||||
console.log('UUID PERSISTENCE TESTS — Simulating update corruption scenarios');
|
|
||||||
console.log('='.repeat(70));
|
|
||||||
|
|
||||||
// --------------------------------------------------------------------------
|
|
||||||
// TEST 1: Normal flow — UUID stays consistent
|
|
||||||
// --------------------------------------------------------------------------
|
|
||||||
console.log('\n--- Test 1: Normal flow — UUID stays consistent ---');
|
|
||||||
setup();
|
|
||||||
|
|
||||||
const uuid1 = getUuidForUser('SpecialK');
|
|
||||||
const uuid2 = getUuidForUser('SpecialK');
|
|
||||||
const uuid3 = getUuidForUser('specialk'); // case insensitive
|
|
||||||
|
|
||||||
assertEqual(uuid1, uuid2, 'Same username returns same UUID');
|
|
||||||
assertEqual(uuid1, uuid3, 'Case-insensitive lookup returns same UUID');
|
|
||||||
assert(uuid1.match(/^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i), 'UUID is valid v4 format');
|
|
||||||
|
|
||||||
// --------------------------------------------------------------------------
// TEST 2: Simulate update corruption (THE BUG) — old code
// Demonstrates how the pre-fix saveConfig/getUuidForUser pair loses the
// player's UUID when config.json is momentarily empty during an update.
// --------------------------------------------------------------------------
console.log('\n--- Test 2: OLD CODE — Config wipe during update loses UUID ---');
setup();

// Setup: player already has a stored UUID in config.json.
const oldConfig = { username: 'SpecialK', userUuids: { 'SpecialK': 'aaaaaaaa-bbbb-4ccc-9ddd-eeeeeeeeeeee' }, hasLaunchedBefore: true };
fs.writeFileSync(CONFIG_FILE, JSON.stringify(oldConfig, null, 2), 'utf8');

const uuidBefore = getUuidForUser_OLD('SpecialK');
assertEqual(uuidBefore, 'aaaaaaaa-bbbb-4ccc-9ddd-eeeeeeeeeeee', 'UUID correct before corruption');

// Simulate: config.json gets corrupted (loadConfig returns {} because file locked).
// This simulates what happens when saveConfig reads an empty/locked file.
fs.writeFileSync(CONFIG_FILE, '', 'utf8'); // Simulate corruption: empty file

// Old saveConfig behavior: reads empty, merges with update, saves.
// This wipes userUuids.
saveConfig_OLD({ hasLaunchedBefore: true });

const configAfterCorruption = JSON.parse(fs.readFileSync(CONFIG_FILE, 'utf8'));
assert(!configAfterCorruption.userUuids, 'OLD CODE: userUuids wiped after corruption');
assert(!configAfterCorruption.username, 'OLD CODE: username wiped after corruption');

// Player re-enters name, gets NEW UUID (character data lost!).
const uuidAfterOld = getUuidForUser_OLD('SpecialK');
assert(uuidAfterOld !== uuidBefore, 'OLD CODE: UUID changed after corruption (BUG!)');
// --------------------------------------------------------------------------
// TEST 3: NEW CODE — Config wipe during update, UUID survives via uuid-store
// The fixed code mirrors UUID mappings into a standalone uuid-store.json,
// so wiping config.json no longer loses the player's identity.
// --------------------------------------------------------------------------
console.log('\n--- Test 3: NEW CODE — Config wipe + UUID survives via uuid-store ---');
setup();

// Setup: player has UUID (stored in both config.json AND uuid-store.json).
const initialConfig = { username: 'SpecialK', userUuids: { 'SpecialK': 'aaaaaaaa-bbbb-4ccc-9ddd-eeeeeeeeeeee' }, hasLaunchedBefore: true };
fs.writeFileSync(CONFIG_FILE, JSON.stringify(initialConfig, null, 2), 'utf8');

// First lookup migrates the mapping into the standalone uuid-store.
const uuidFirst = getUuidForUser('SpecialK');
assertEqual(uuidFirst, 'aaaaaaaa-bbbb-4ccc-9ddd-eeeeeeeeeeee', 'UUID correct before corruption');
assert(fs.existsSync(UUID_STORE_FILE), 'uuid-store.json created');

// Simulate: config.json gets wiped (same as the update bug).
fs.writeFileSync(CONFIG_FILE, '{}', 'utf8');

// Verify the wipe actually removed the identity from config.json.
const wipedConfig = loadConfig();
assert(!wipedConfig.userUuids || Object.keys(wipedConfig.userUuids).length === 0, 'Config wiped — no userUuids');
assert(!wipedConfig.username, 'Config wiped — no username');

// Player re-enters same name → UUID recovered from uuid-store!
const uuidAfterNew = getUuidForUser('SpecialK');
assertEqual(uuidAfterNew, 'aaaaaaaa-bbbb-4ccc-9ddd-eeeeeeeeeeee', 'NEW CODE: UUID preserved after config wipe!');
// --------------------------------------------------------------------------
// TEST 4: saveConfig safety check — refuses to overwrite good data with empty
// When the on-disk config cannot be parsed, saveConfig must not blindly
// replace it with only the incoming partial update.
// --------------------------------------------------------------------------
console.log('\n--- Test 4: saveConfig safety check — blocks destructive writes ---');
setup();

// Setup: valid config file with data.
const goodConfig = { username: 'SpecialK', userUuids: { 'SpecialK': 'aaaaaaaa-bbbb-4ccc-9ddd-eeeeeeeeeeee' }, hasLaunchedBefore: true, installPath: 'C:\\Games\\Hytale' };
fs.writeFileSync(CONFIG_FILE, JSON.stringify(goodConfig, null, 2), 'utf8');

// Make the file temporarily unreadable by writing garbage (simulates file lock/corruption).
fs.writeFileSync(CONFIG_FILE, 'NOT VALID JSON!!!', 'utf8');

// Try to save — should refuse because the file exists but can't be parsed.
// A throw here is acceptable; what matters is asserted below.
try {
  saveConfig({ someNewField: true });
} catch (e) {
  // expected — the safety check may reject the write while the file is unreadable
}

// The file should still have the garbage (not overwritten with { someNewField: true }).
const afterContent = fs.readFileSync(CONFIG_FILE, 'utf8');

// Restore original for backup recovery test.
fs.writeFileSync(CONFIG_FILE, JSON.stringify(goodConfig, null, 2), 'utf8');

// Note: with invalid JSON, loadConfig returns {} and safety check triggers.
// The save may eventually succeed on retry if the file becomes readable.
// What matters is that it doesn't blindly overwrite.
assert(afterContent !== '{\n  "someNewField": true\n}', 'Safety check prevented blind overwrite of corrupted file');
// --------------------------------------------------------------------------
// TEST 5: Backup recovery — config.json corrupted, recovered from .bak
// loadConfig should transparently fall back to the backup file when the
// primary config cannot be parsed.
// --------------------------------------------------------------------------
console.log('\n--- Test 5: Backup recovery — auto-recover from .bak ---');
setup();

// Create a valid backup and a corrupted primary config.
const validConfig = { username: 'SpecialK', userUuids: { 'SpecialK': 'aaaaaaaa-bbbb-4ccc-9ddd-eeeeeeeeeeee' } };
fs.writeFileSync(CONFIG_BACKUP, JSON.stringify(validConfig, null, 2), 'utf8');
fs.writeFileSync(CONFIG_FILE, 'CORRUPTED', 'utf8');

const recovered = loadConfig();
assertEqual(recovered.username, 'SpecialK', 'Username recovered from backup');
assert(recovered.userUuids && recovered.userUuids['SpecialK'] === 'aaaaaaaa-bbbb-4ccc-9ddd-eeeeeeeeeeee', 'UUID recovered from backup');
// --------------------------------------------------------------------------
// TEST 6: Full update simulation — the exact scenario from player report
// Walks through install → auto-update (with a transient file lock) →
// username re-entry, asserting the UUID survives the whole cycle.
// --------------------------------------------------------------------------
console.log('\n--- Test 6: Full update simulation (player report scenario) ---');
setup();

// Step 1: Player installs v2.3.4, sets username, plays game.
console.log('  Step 1: Player sets up profile...');
saveConfig({ username: 'Special K', hasLaunchedBefore: true });
const originalUuid = getUuidForUser('Special K');
console.log(`    Original UUID: ${originalUuid}`);

// Step 2: v2.3.5 auto-update — new app launches.
console.log('  Step 2: Simulating v2.3.5 update...');

// Simulate the saveConfig calls that happen during startup, but first
// simulate the config being temporarily locked (reads back empty).
const preUpdateContent = fs.readFileSync(CONFIG_FILE, 'utf8');
fs.writeFileSync(CONFIG_FILE, '', 'utf8'); // Simulate: file empty during write (race condition)

// These mirror the startup calls from profileManager.init,
// migrateUserDataToCentralized and handleFirstLaunchCheck.
// With the safety check, they must NOT wipe the data.
try { saveConfig({ hasLaunchedBefore: true }); } catch (e) { /* expected — safety check blocks it */ }

// Simulate file becomes readable again (antivirus releases lock).
fs.writeFileSync(CONFIG_FILE, preUpdateContent, 'utf8');

// Step 3: Player re-enters username (because UI might show empty).
console.log('  Step 3: Player re-enters username...');
const postUpdateUuid = getUuidForUser('Special K');
console.log(`    Post-update UUID: ${postUpdateUuid}`);

assertEqual(postUpdateUuid, originalUuid, 'UUID survived the full update cycle!');
// --------------------------------------------------------------------------
// TEST 7: Multiple users — UUIDs stay independent
// Distinct usernames must map to distinct UUIDs, and all of them must
// survive a config.json wipe via the uuid-store.
// --------------------------------------------------------------------------
console.log('\n--- Test 7: Multiple users — UUIDs stay independent ---');
setup();

const uuidAlice = getUuidForUser('Alice');
const uuidBob = getUuidForUser('Bob');
const uuidCharlie = getUuidForUser('Charlie');

assert(uuidAlice !== uuidBob, 'Alice and Bob have different UUIDs');
assert(uuidBob !== uuidCharlie, 'Bob and Charlie have different UUIDs');

// Wipe config; every mapping should survive.
fs.writeFileSync(CONFIG_FILE, '{}', 'utf8');

assertEqual(getUuidForUser('Alice'), uuidAlice, 'Alice UUID survived config wipe');
assertEqual(getUuidForUser('Bob'), uuidBob, 'Bob UUID survived config wipe');
assertEqual(getUuidForUser('Charlie'), uuidCharlie, 'Charlie UUID survived config wipe');
// --------------------------------------------------------------------------
// TEST 8: UUID store deleted — recovery from config.json
// The redundancy works both ways: if uuid-store.json disappears, the
// mapping is restored from config.json and the store is recreated.
// --------------------------------------------------------------------------
console.log('\n--- Test 8: UUID store deleted — recovery from config.json ---');
setup();

// Create UUID via normal flow (saves to both stores).
const uuidOriginal = getUuidForUser('TestPlayer');

// Delete uuid-store.json (simulates user manually deleting it or disk issue).
fs.unlinkSync(UUID_STORE_FILE);
assert(!fs.existsSync(UUID_STORE_FILE), 'uuid-store.json deleted');

// UUID should be recovered from config.json.
const uuidRecovered = getUuidForUser('TestPlayer');
assertEqual(uuidRecovered, uuidOriginal, 'UUID recovered from config.json after uuid-store deletion');
assert(fs.existsSync(UUID_STORE_FILE), 'uuid-store.json recreated after recovery');
// --------------------------------------------------------------------------
// TEST 9: Both stores deleted — new UUID generated (fresh install)
// With no surviving record anywhere, a brand-new UUID is the correct
// (and only possible) outcome.
// --------------------------------------------------------------------------
console.log('\n--- Test 9: Both stores deleted — new UUID (fresh install) ---');
setup();

const uuidFresh = getUuidForUser('NewPlayer');

// Delete both persistence files.
fs.unlinkSync(UUID_STORE_FILE);
fs.unlinkSync(CONFIG_FILE);

const uuidAfterWipe = getUuidForUser('NewPlayer');
assert(uuidAfterWipe !== uuidFresh, 'New UUID generated when both stores are gone (expected for true fresh install)');
// --------------------------------------------------------------------------
// TEST 10: Worst case — config.json wiped AND uuid-store.json exists
// Simulates the EXACT player-reported scenario with new code: config is
// emptied by an update, the player re-enters their name, and the UUID
// (hence character data) must come back from the uuid-store.
// --------------------------------------------------------------------------
console.log('\n--- Test 10: Exact player scenario with new code ---');
setup();

// Player has been playing for a while.
saveConfig({
  username: 'Special K',
  hasLaunchedBefore: true,
  installPath: 'C:\\Games\\Hytale',
  version_client: '2026.02.19-1a311a592',
  version_branch: 'release',
  userUuids: { 'Special K': '11111111-2222-4333-9444-555555555555' }
});

// First call creates uuid-store.json.
const originalUuid10 = getUuidForUser('Special K');
assertEqual(originalUuid10, '11111111-2222-4333-9444-555555555555', 'Original UUID loaded');

// BOOM: Update happens, config.json completely wiped.
fs.writeFileSync(CONFIG_FILE, '{}', 'utf8');

// Username lost — player has to re-enter.
const loadedUsername = loadConfig().username;
assert(!loadedUsername, 'Username is gone from config (simulating what player saw)');

// Player types "Special K" again in settings.
saveConfig({ username: 'Special K' });

// Player clicks Play — getUuidForUser called.
const recoveredUuid10 = getUuidForUser('Special K');
assertEqual(recoveredUuid10, '11111111-2222-4333-9444-555555555555', 'UUID recovered — character data preserved!');
// ============================================================================
// RESULTS
// Prints the pass/fail summary, lists any failure messages, cleans up the
// temporary test files, and exits non-zero if anything failed.
// ============================================================================
console.log('\n' + '='.repeat(70));
console.log(`RESULTS: ${passed} passed, ${failed} failed`);
if (failed > 0) {
  console.log('\nFailures:');
  failures.forEach(f => console.log(`  ✗ ${f}`));
}
console.log('='.repeat(70));

cleanup();
process.exit(failed > 0 ? 1 : 0);