mirror of
https://git.sanhost.net/sanasol/hytale-f2p
synced 2026-02-26 16:21:49 -03:00
Compare commits
15 Commits
v2.3.0-tes
...
v2.3.8
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
44834e7d12 | ||
|
|
cb7f7e51bf | ||
|
|
9b20c454d3 | ||
|
|
4e04d657b7 | ||
|
|
6d811fd7e0 | ||
|
|
8435fc698c | ||
|
|
6c369edb0f | ||
|
|
fdd8e59ec4 | ||
|
|
e7a033932f | ||
|
|
11c6d40dfe | ||
|
|
0dafb17c7b | ||
|
|
66112f15b2 | ||
|
|
0a71fdac8c | ||
|
|
4b9eae215b | ||
|
|
1510eceb0f |
2
.github/CODE_OF_CONDUCT.md
vendored
2
.github/CODE_OF_CONDUCT.md
vendored
@@ -36,7 +36,7 @@ This Code of Conduct applies within all community spaces, and also applies when
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at [Discord Server, message Founders/Devs](https://discord.gg/hf2pdc). All complaints will be reviewed and investigated promptly and fairly.
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at [Discord Server, message Founders/Devs](https://discord.gg/Fhbb9Yk5WW). All complaints will be reviewed and investigated promptly and fairly.
|
||||
|
||||
All community leaders are obligated to respect the privacy and security of the reporter of any incident.
|
||||
|
||||
|
||||
2
.github/ISSUE_TEMPLATE/support_request.yml
vendored
2
.github/ISSUE_TEMPLATE/support_request.yml
vendored
@@ -22,7 +22,7 @@ body:
|
||||
value: |
|
||||
If you need help or support with using the launcher, please fill out this support request.
|
||||
Provide as much detail as possible so we can assist you effectively.
|
||||
**Need a quick assistance?** Please Open-A-Ticket in our [Discord Server](https://discord.gg/gME8rUy3MB)!
|
||||
**Need a quick assistance?** Please Open-A-Ticket in our [Discord Server](https://discord.gg/Fhbb9Yk5WW)!
|
||||
|
||||
- type: textarea
|
||||
id: question
|
||||
|
||||
8
.github/workflows/release.yml
vendored
8
.github/workflows/release.yml
vendored
@@ -9,8 +9,8 @@ on:
|
||||
env:
|
||||
# Domain for small API calls (goes through Cloudflare - fine for <100MB)
|
||||
FORGEJO_API: https://git.sanhost.net/api/v1
|
||||
# Direct to Forgejo port (bypasses Cloudflare + Traefik for large uploads)
|
||||
FORGEJO_UPLOAD: http://208.69.78.130:3001/api/v1
|
||||
# Direct upload URL (bypasses Cloudflare for large files) - set in repo secrets
|
||||
FORGEJO_UPLOAD: ${{ secrets.FORGEJO_UPLOAD_URL }}
|
||||
|
||||
jobs:
|
||||
create-release:
|
||||
@@ -107,13 +107,13 @@ jobs:
|
||||
- run: npm ci
|
||||
|
||||
- name: Build Linux Packages
|
||||
run: npx electron-builder --linux AppImage deb rpm --publish never
|
||||
run: npx electron-builder --linux AppImage deb rpm pacman --publish never
|
||||
|
||||
- name: Upload to Release
|
||||
run: |
|
||||
RELEASE_ID=$(curl -s "${FORGEJO_API}/repos/${GITHUB_REPOSITORY}/releases/tags/${{ github.ref_name }}" \
|
||||
-H "Authorization: token ${{ secrets.RELEASE_TOKEN }}" | python3 -c 'import sys,json; print(json.load(sys.stdin)["id"])')
|
||||
for file in dist/*.AppImage dist/*.AppImage.blockmap dist/*.deb dist/*.rpm dist/latest-linux.yml; do
|
||||
for file in dist/*.AppImage dist/*.AppImage.blockmap dist/*.deb dist/*.rpm dist/*.pacman dist/latest-linux.yml; do
|
||||
[ -f "$file" ] || continue
|
||||
echo "Uploading $file..."
|
||||
curl -s --max-time 600 -X POST "${FORGEJO_UPLOAD}/repos/${GITHUB_REPOSITORY}/releases/${RELEASE_ID}/assets?name=$(basename $file)" \
|
||||
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -17,6 +17,9 @@ dist/
|
||||
# Project Specific: Downloaded patcher (from hytale-auth-server)
|
||||
backend/patcher/
|
||||
|
||||
# Private docs (local only)
|
||||
docs/PATCH_CDN_INFRASTRUCTURE.md
|
||||
|
||||
# macOS Specific
|
||||
.DS_Store
|
||||
*.zst.DS_Store
|
||||
|
||||
@@ -53,7 +53,7 @@ window.closeDiscordPopup = function() {
|
||||
};
|
||||
|
||||
window.joinDiscord = async function() {
|
||||
await window.electronAPI?.openExternal('https://discord.gg/hf2pdc');
|
||||
await window.electronAPI?.openExternal('https://discord.gg/Fhbb9Yk5WW');
|
||||
|
||||
try {
|
||||
await window.electronAPI?.saveConfig({ discordPopup: true });
|
||||
|
||||
@@ -1103,7 +1103,7 @@ function getRetryContextMessage() {
|
||||
}
|
||||
|
||||
window.openDiscordExternal = function() {
|
||||
window.electronAPI?.openExternal('https://discord.gg/hf2pdc');
|
||||
window.electronAPI?.openExternal('https://discord.gg/Fhbb9Yk5WW');
|
||||
};
|
||||
|
||||
window.toggleMaximize = toggleMaximize;
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
|
||||
### ⚠️ **WARNING: READ [QUICK START](#-quick-start) before Downloading & Installing the Launcher!** ⚠️
|
||||
|
||||
#### 🛑 **Found a problem? [Join the HF2P Discord](https://discord.gg/hf2pdc) and head to `#-⚠️-community-help`** 🛑
|
||||
#### 🛑 **Found a problem? [Join the HF2P Discord](https://discord.gg/Fhbb9Yk5WW) and head to `#-⚠️-community-help`** 🛑
|
||||
|
||||
<p>
|
||||
👍 If you like the project, <b>feel free to support us via Buy Me a Coffee!</b> ☕<br>
|
||||
@@ -455,7 +455,7 @@ See [BUILD.md](docs/BUILD.md) for comprehensive build instructions.
|
||||
<div align="center">
|
||||
|
||||
**Questions? Ads? Collaboration? Endorsement? Other business-related?**
|
||||
Message the founders at https://discord.gg/hf2pdc
|
||||
Message the founders at https://discord.gg/Fhbb9Yk5WW
|
||||
|
||||
</div>
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
Play with friends online! This guide covers both easy in-game hosting and advanced dedicated server setup.
|
||||
|
||||
### **DOWNLOAD SERVER FILES (JAR/RAR/SCRIPTS) HERE: https://discord.gg/hf2pdc**
|
||||
### **DOWNLOAD SERVER FILES (JAR/RAR/SCRIPTS) HERE: https://discord.gg/Fhbb9Yk5WW**
|
||||
|
||||
**Table of Contents**
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Hytale F2P Launcher - Troubleshooting Guide
|
||||
|
||||
This guide covers common issues and their solutions. If your issue isn't listed here, please check [existing issues](https://github.com/amiayweb/Hytale-F2P/issues) or join our [Discord](https://discord.gg/gME8rUy3MB).
|
||||
This guide covers common issues and their solutions. If your issue isn't listed here, please check [existing issues](https://github.com/amiayweb/Hytale-F2P/issues) or join our [Discord](https://discord.gg/Fhbb9Yk5WW).
|
||||
|
||||
---
|
||||
|
||||
@@ -437,7 +437,7 @@ Game sessions have a 10-hour TTL. This is by design for security.
|
||||
If your issue isn't resolved by this guide:
|
||||
|
||||
1. **Check existing issues:** [GitHub Issues](https://github.com/amiayweb/Hytale-F2P/issues)
|
||||
2. **Join Discord:** [discord.gg/gME8rUy3MB](https://discord.gg/gME8rUy3MB)
|
||||
2. **Join Discord:** [discord.gg/Fhbb9Yk5WW](https://discord.gg/Fhbb9Yk5WW)
|
||||
3. **Open a new issue** with:
|
||||
- Your operating system and version
|
||||
- Launcher version
|
||||
|
||||
@@ -4,6 +4,10 @@ const logger = require('./logger');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
const https = require('https');
|
||||
|
||||
const FORGEJO_API = 'https://git.sanhost.net/api/v1';
|
||||
const FORGEJO_REPO = 'sanasol/hytale-f2p';
|
||||
|
||||
class AppUpdater {
|
||||
constructor(mainWindow) {
|
||||
@@ -14,6 +18,34 @@ class AppUpdater {
|
||||
this.setupAutoUpdater();
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch the latest non-draft release tag from Forgejo and set the feed URL
|
||||
*/
|
||||
async _resolveUpdateUrl() {
|
||||
return new Promise((resolve, reject) => {
|
||||
https.get(`${FORGEJO_API}/repos/${FORGEJO_REPO}/releases?limit=5`, (res) => {
|
||||
let data = '';
|
||||
res.on('data', (chunk) => data += chunk);
|
||||
res.on('end', () => {
|
||||
try {
|
||||
const releases = JSON.parse(data);
|
||||
const latest = releases.find(r => !r.draft && !r.prerelease);
|
||||
if (latest) {
|
||||
const url = `https://git.sanhost.net/${FORGEJO_REPO}/releases/download/${latest.tag_name}`;
|
||||
console.log(`Auto-update URL resolved to: ${url}`);
|
||||
autoUpdater.setFeedURL({ provider: 'generic', url });
|
||||
resolve(url);
|
||||
} else {
|
||||
reject(new Error('No published release found'));
|
||||
}
|
||||
} catch (e) {
|
||||
reject(e);
|
||||
}
|
||||
});
|
||||
}).on('error', reject);
|
||||
});
|
||||
}
|
||||
|
||||
setupAutoUpdater() {
|
||||
|
||||
// Configure logger for electron-updater
|
||||
@@ -216,8 +248,10 @@ class AppUpdater {
|
||||
}
|
||||
|
||||
checkForUpdatesAndNotify() {
|
||||
// Check for updates and notify if available
|
||||
autoUpdater.checkForUpdatesAndNotify().catch(err => {
|
||||
// Resolve latest release URL then check for updates
|
||||
this._resolveUpdateUrl().catch(err => {
|
||||
console.warn('Failed to resolve update URL:', err.message);
|
||||
}).then(() => autoUpdater.checkForUpdatesAndNotify()).catch(err => {
|
||||
console.error('Failed to check for updates:', err);
|
||||
|
||||
// Network errors are not critical - just log and continue
|
||||
@@ -245,8 +279,10 @@ class AppUpdater {
|
||||
}
|
||||
|
||||
checkForUpdates() {
|
||||
// Manual check for updates (returns promise)
|
||||
return autoUpdater.checkForUpdates().catch(err => {
|
||||
// Manual check - resolve latest release URL first
|
||||
return this._resolveUpdateUrl().catch(err => {
|
||||
console.warn('Failed to resolve update URL:', err.message);
|
||||
}).then(() => autoUpdater.checkForUpdates()).catch(err => {
|
||||
console.error('Failed to check for updates:', err);
|
||||
|
||||
// Network errors are not critical - just return no update available
|
||||
|
||||
@@ -54,6 +54,7 @@ function getAppDir() {
|
||||
const CONFIG_FILE = path.join(getAppDir(), 'config.json');
|
||||
const CONFIG_BACKUP = path.join(getAppDir(), 'config.json.bak');
|
||||
const CONFIG_TEMP = path.join(getAppDir(), 'config.json.tmp');
|
||||
const UUID_STORE_FILE = path.join(getAppDir(), 'uuid-store.json');
|
||||
|
||||
// =============================================================================
|
||||
// CONFIG VALIDATION
|
||||
@@ -152,6 +153,22 @@ function saveConfig(update) {
|
||||
|
||||
// Load current config
|
||||
const currentConfig = loadConfig();
|
||||
|
||||
// SAFETY: If config file exists on disk but loadConfig() returned empty,
|
||||
// something is wrong (file locked, corrupted, etc.). Refuse to save
|
||||
// because merging with {} would wipe all existing data (userUuids, username, etc.)
|
||||
if (Object.keys(currentConfig).length === 0 && fs.existsSync(CONFIG_FILE)) {
|
||||
const fileSize = fs.statSync(CONFIG_FILE).size;
|
||||
if (fileSize > 2) { // More than just "{}"
|
||||
console.error(`[Config] REFUSING to save — loaded empty but file exists (${fileSize} bytes). Retrying load...`);
|
||||
// Wait and retry the load
|
||||
const delay = attempt * 200;
|
||||
const start = Date.now();
|
||||
while (Date.now() - start < delay) { /* busy wait */ }
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
const newConfig = { ...currentConfig, ...update };
|
||||
const data = JSON.stringify(newConfig, null, 2);
|
||||
|
||||
@@ -238,11 +255,18 @@ function saveUsername(username) {
|
||||
// Check if we're actually changing the username (case-insensitive comparison)
|
||||
const isRename = currentName && currentName.toLowerCase() !== newName.toLowerCase();
|
||||
|
||||
// Also update UUID store (source of truth)
|
||||
migrateUuidStoreIfNeeded();
|
||||
const uuidStore = loadUuidStore();
|
||||
|
||||
if (isRename) {
|
||||
// Find the UUID for the current username
|
||||
const currentKey = Object.keys(userUuids).find(
|
||||
k => k.toLowerCase() === currentName.toLowerCase()
|
||||
);
|
||||
const currentStoreKey = Object.keys(uuidStore).find(
|
||||
k => k.toLowerCase() === currentName.toLowerCase()
|
||||
);
|
||||
|
||||
if (currentKey && userUuids[currentKey]) {
|
||||
// Check if target username already exists (would be a different identity)
|
||||
@@ -258,6 +282,9 @@ function saveUsername(username) {
|
||||
const uuid = userUuids[currentKey];
|
||||
delete userUuids[currentKey];
|
||||
userUuids[newName] = uuid;
|
||||
// Same in UUID store
|
||||
if (currentStoreKey) delete uuidStore[currentStoreKey];
|
||||
uuidStore[newName] = uuid;
|
||||
console.log(`[Config] Renamed identity: "${currentKey}" → "${newName}" (UUID preserved: ${uuid})`);
|
||||
}
|
||||
}
|
||||
@@ -270,11 +297,20 @@ function saveUsername(username) {
|
||||
const uuid = userUuids[currentKey];
|
||||
delete userUuids[currentKey];
|
||||
userUuids[newName] = uuid;
|
||||
// Same in UUID store
|
||||
const storeKey = Object.keys(uuidStore).find(k => k.toLowerCase() === currentName.toLowerCase());
|
||||
if (storeKey) {
|
||||
delete uuidStore[storeKey];
|
||||
uuidStore[newName] = uuid;
|
||||
}
|
||||
console.log(`[Config] Updated username case: "${currentKey}" → "${newName}"`);
|
||||
}
|
||||
}
|
||||
|
||||
// Save both username and updated userUuids
|
||||
// Save UUID store
|
||||
saveUuidStore(uuidStore);
|
||||
|
||||
// Save both username and updated userUuids to config
|
||||
saveConfig({ username: newName, userUuids });
|
||||
console.log(`[Config] Username saved: "${newName}"`);
|
||||
return newName;
|
||||
@@ -310,6 +346,7 @@ function hasUsername() {
|
||||
|
||||
// =============================================================================
|
||||
// UUID MANAGEMENT - Persistent and safe
|
||||
// Uses separate uuid-store.json as source of truth (survives config.json corruption)
|
||||
// =============================================================================
|
||||
|
||||
/**
|
||||
@@ -320,10 +357,55 @@ function normalizeUsername(username) {
|
||||
return username.trim().toLowerCase();
|
||||
}
|
||||
|
||||
/**
|
||||
* Load UUID store from separate file (independent of config.json)
|
||||
*/
|
||||
function loadUuidStore() {
|
||||
try {
|
||||
if (fs.existsSync(UUID_STORE_FILE)) {
|
||||
const data = fs.readFileSync(UUID_STORE_FILE, 'utf8');
|
||||
if (data.trim()) {
|
||||
return JSON.parse(data);
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
console.error('[UUID Store] Failed to load:', err.message);
|
||||
}
|
||||
return {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Save UUID store to separate file (atomic write)
|
||||
*/
|
||||
function saveUuidStore(store) {
|
||||
try {
|
||||
const dir = path.dirname(UUID_STORE_FILE);
|
||||
if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true });
|
||||
const tmpFile = UUID_STORE_FILE + '.tmp';
|
||||
fs.writeFileSync(tmpFile, JSON.stringify(store, null, 2), 'utf8');
|
||||
fs.renameSync(tmpFile, UUID_STORE_FILE);
|
||||
} catch (err) {
|
||||
console.error('[UUID Store] Failed to save:', err.message);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* One-time migration: copy userUuids from config.json to uuid-store.json
|
||||
*/
|
||||
function migrateUuidStoreIfNeeded() {
|
||||
if (fs.existsSync(UUID_STORE_FILE)) return; // Already migrated
|
||||
const config = loadConfig();
|
||||
if (config.userUuids && Object.keys(config.userUuids).length > 0) {
|
||||
console.log('[UUID Store] Migrating', Object.keys(config.userUuids).length, 'UUIDs from config.json');
|
||||
saveUuidStore(config.userUuids);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get UUID for a username
|
||||
* Creates new UUID only if user explicitly doesn't exist
|
||||
* Uses case-insensitive lookup to prevent duplicates, but preserves original case for display
|
||||
* Source of truth: uuid-store.json (separate from config.json)
|
||||
* Also writes to config.json for backward compatibility
|
||||
* Creates new UUID only if user doesn't exist in EITHER store
|
||||
*/
|
||||
function getUuidForUser(username) {
|
||||
const { v4: uuidv4 } = require('uuid');
|
||||
@@ -335,32 +417,69 @@ function getUuidForUser(username) {
|
||||
const displayName = username.trim();
|
||||
const normalizedLookup = displayName.toLowerCase();
|
||||
|
||||
const config = loadConfig();
|
||||
const userUuids = config.userUuids || {};
|
||||
// Ensure UUID store exists (one-time migration from config.json)
|
||||
migrateUuidStoreIfNeeded();
|
||||
|
||||
// Case-insensitive lookup - find existing key regardless of case
|
||||
const existingKey = Object.keys(userUuids).find(k => k.toLowerCase() === normalizedLookup);
|
||||
// 1. Check UUID store first (source of truth)
|
||||
const uuidStore = loadUuidStore();
|
||||
const storeKey = Object.keys(uuidStore).find(k => k.toLowerCase() === normalizedLookup);
|
||||
|
||||
if (existingKey) {
|
||||
// Found existing - return UUID, update display name if case changed
|
||||
const existingUuid = userUuids[existingKey];
|
||||
if (storeKey) {
|
||||
const existingUuid = uuidStore[storeKey];
|
||||
|
||||
// If user typed different case, update the key to new case (preserving UUID)
|
||||
if (existingKey !== displayName) {
|
||||
console.log(`[Config] Updating username case: "${existingKey}" → "${displayName}"`);
|
||||
delete userUuids[existingKey];
|
||||
userUuids[displayName] = existingUuid;
|
||||
saveConfig({ userUuids });
|
||||
// Update case if needed
|
||||
if (storeKey !== displayName) {
|
||||
console.log(`[UUID Store] Updating username case: "${storeKey}" → "${displayName}"`);
|
||||
delete uuidStore[storeKey];
|
||||
uuidStore[displayName] = existingUuid;
|
||||
saveUuidStore(uuidStore);
|
||||
}
|
||||
|
||||
// Sync to config.json (backward compat, non-critical)
|
||||
try {
|
||||
const config = loadConfig();
|
||||
const configUuids = config.userUuids || {};
|
||||
const configKey = Object.keys(configUuids).find(k => k.toLowerCase() === normalizedLookup);
|
||||
if (!configKey || configUuids[configKey] !== existingUuid) {
|
||||
if (configKey) delete configUuids[configKey];
|
||||
configUuids[displayName] = existingUuid;
|
||||
saveConfig({ userUuids: configUuids });
|
||||
}
|
||||
} catch (e) {
|
||||
// Non-critical — UUID store is the source of truth
|
||||
}
|
||||
|
||||
console.log(`[UUID] ${displayName} → ${existingUuid} (from uuid-store)`);
|
||||
return existingUuid;
|
||||
}
|
||||
|
||||
// Create new UUID for new user - store with original case
|
||||
// 2. Fallback: check config.json (recovery if uuid-store.json was lost)
|
||||
const config = loadConfig();
|
||||
const userUuids = config.userUuids || {};
|
||||
const configKey = Object.keys(userUuids).find(k => k.toLowerCase() === normalizedLookup);
|
||||
|
||||
if (configKey) {
|
||||
const recoveredUuid = userUuids[configKey];
|
||||
console.warn(`[UUID] RECOVERED "${displayName}" → ${recoveredUuid} from config.json (uuid-store was missing)`);
|
||||
|
||||
// Save to UUID store
|
||||
uuidStore[displayName] = recoveredUuid;
|
||||
saveUuidStore(uuidStore);
|
||||
|
||||
return recoveredUuid;
|
||||
}
|
||||
|
||||
// 3. New user — generate UUID, save to BOTH stores
|
||||
const newUuid = uuidv4();
|
||||
console.log(`[UUID] NEW user "${displayName}" → ${newUuid}`);
|
||||
|
||||
// Save to UUID store (source of truth)
|
||||
uuidStore[displayName] = newUuid;
|
||||
saveUuidStore(uuidStore);
|
||||
|
||||
// Save to config.json (backward compat)
|
||||
userUuids[displayName] = newUuid;
|
||||
saveConfig({ userUuids });
|
||||
console.log(`[Config] Created new UUID for "${displayName}": ${newUuid}`);
|
||||
|
||||
return newUuid;
|
||||
}
|
||||
@@ -380,22 +499,26 @@ function getCurrentUuid() {
|
||||
* Get all UUID mappings (raw object)
|
||||
*/
|
||||
function getAllUuidMappings() {
|
||||
const config = loadConfig();
|
||||
return config.userUuids || {};
|
||||
migrateUuidStoreIfNeeded();
|
||||
const uuidStore = loadUuidStore();
|
||||
// Fallback to config if uuid-store is empty
|
||||
if (Object.keys(uuidStore).length === 0) {
|
||||
const config = loadConfig();
|
||||
return config.userUuids || {};
|
||||
}
|
||||
return uuidStore;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all UUID mappings as array with current user flag
|
||||
*/
|
||||
function getAllUuidMappingsArray() {
|
||||
const config = loadConfig();
|
||||
const userUuids = config.userUuids || {};
|
||||
const allMappings = getAllUuidMappings();
|
||||
const currentUsername = loadUsername();
|
||||
// Case-insensitive comparison for isCurrent
|
||||
const normalizedCurrent = currentUsername ? currentUsername.toLowerCase() : null;
|
||||
|
||||
return Object.entries(userUuids).map(([username, uuid]) => ({
|
||||
username, // Original case preserved
|
||||
return Object.entries(allMappings).map(([username, uuid]) => ({
|
||||
username,
|
||||
uuid,
|
||||
isCurrent: username.toLowerCase() === normalizedCurrent
|
||||
}));
|
||||
@@ -419,16 +542,20 @@ function setUuidForUser(username, uuid) {
|
||||
|
||||
const displayName = username.trim();
|
||||
const normalizedLookup = displayName.toLowerCase();
|
||||
|
||||
// 1. Update UUID store (source of truth)
|
||||
migrateUuidStoreIfNeeded();
|
||||
const uuidStore = loadUuidStore();
|
||||
const storeKey = Object.keys(uuidStore).find(k => k.toLowerCase() === normalizedLookup);
|
||||
if (storeKey) delete uuidStore[storeKey];
|
||||
uuidStore[displayName] = uuid;
|
||||
saveUuidStore(uuidStore);
|
||||
|
||||
// 2. Update config.json (backward compat)
|
||||
const config = loadConfig();
|
||||
const userUuids = config.userUuids || {};
|
||||
|
||||
// Remove any existing entry with same name (case-insensitive)
|
||||
const existingKey = Object.keys(userUuids).find(k => k.toLowerCase() === normalizedLookup);
|
||||
if (existingKey) {
|
||||
delete userUuids[existingKey];
|
||||
}
|
||||
|
||||
// Store with original case
|
||||
if (existingKey) delete userUuids[existingKey];
|
||||
userUuids[displayName] = uuid;
|
||||
saveConfig({ userUuids });
|
||||
|
||||
@@ -454,20 +581,30 @@ function deleteUuidForUser(username) {
|
||||
}
|
||||
|
||||
const normalizedLookup = username.trim().toLowerCase();
|
||||
let deleted = false;
|
||||
|
||||
// 1. Delete from UUID store (source of truth)
|
||||
migrateUuidStoreIfNeeded();
|
||||
const uuidStore = loadUuidStore();
|
||||
const storeKey = Object.keys(uuidStore).find(k => k.toLowerCase() === normalizedLookup);
|
||||
if (storeKey) {
|
||||
delete uuidStore[storeKey];
|
||||
saveUuidStore(uuidStore);
|
||||
deleted = true;
|
||||
}
|
||||
|
||||
// 2. Delete from config.json (backward compat)
|
||||
const config = loadConfig();
|
||||
const userUuids = config.userUuids || {};
|
||||
|
||||
// Case-insensitive lookup
|
||||
const existingKey = Object.keys(userUuids).find(k => k.toLowerCase() === normalizedLookup);
|
||||
|
||||
if (existingKey) {
|
||||
delete userUuids[existingKey];
|
||||
saveConfig({ userUuids });
|
||||
console.log(`[Config] UUID deleted for "${username}"`);
|
||||
return true;
|
||||
deleted = true;
|
||||
}
|
||||
|
||||
return false;
|
||||
if (deleted) console.log(`[Config] UUID deleted for "${username}"`);
|
||||
return deleted;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -788,5 +925,6 @@ module.exports = {
|
||||
loadVersionBranch,
|
||||
|
||||
// Constants
|
||||
CONFIG_FILE
|
||||
CONFIG_FILE,
|
||||
UUID_STORE_FILE
|
||||
};
|
||||
|
||||
@@ -3,7 +3,7 @@ const path = require('path');
|
||||
const { execFile } = require('child_process');
|
||||
const { downloadFile, retryDownload } = require('../utils/fileManager');
|
||||
const { getOS, getArch } = require('../utils/platformUtils');
|
||||
const { validateChecksum, extractVersionDetails, canUseDifferentialUpdate, needsIntermediatePatches, getInstalledClientVersion } = require('../services/versionManager');
|
||||
const { validateChecksum, extractVersionDetails, getInstalledClientVersion, getUpdatePlan, extractVersionNumber, getAllMirrorUrls, getPatchesBaseUrl } = require('../services/versionManager');
|
||||
const { installButler } = require('./butlerManager');
|
||||
const { GAME_DIR, CACHE_DIR, TOOLS_DIR } = require('../core/paths');
|
||||
const { saveVersionClient } = require('../core/config');
|
||||
@@ -30,16 +30,63 @@ async function acquireGameArchive(downloadUrl, targetPath, checksum, progressCal
|
||||
}
|
||||
|
||||
console.log(`Downloading game archive from: ${downloadUrl}`);
|
||||
|
||||
try {
|
||||
if (allowRetry) {
|
||||
await retryDownload(downloadUrl, targetPath, progressCallback);
|
||||
} else {
|
||||
await downloadFile(downloadUrl, targetPath, progressCallback);
|
||||
|
||||
// Try primary URL first, then mirror URLs on timeout/connection failure
|
||||
const mirrors = await getAllMirrorUrls();
|
||||
const primaryBase = await getPatchesBaseUrl();
|
||||
const urlsToTry = [downloadUrl];
|
||||
|
||||
// Build mirror URLs by replacing the base URL
|
||||
for (const mirror of mirrors) {
|
||||
if (mirror !== primaryBase && downloadUrl.startsWith(primaryBase)) {
|
||||
const mirrorUrl = downloadUrl.replace(primaryBase, mirror);
|
||||
if (!urlsToTry.includes(mirrorUrl)) {
|
||||
urlsToTry.push(mirrorUrl);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
const enhancedError = new Error(`Archive download failed: ${error.message}`);
|
||||
enhancedError.originalError = error;
|
||||
}
|
||||
|
||||
let lastError;
|
||||
for (let i = 0; i < urlsToTry.length; i++) {
|
||||
const url = urlsToTry[i];
|
||||
try {
|
||||
if (i > 0) {
|
||||
console.log(`[Download] Trying mirror ${i}: ${url}`);
|
||||
if (progressCallback) {
|
||||
progressCallback(`Trying alternative mirror (${i}/${urlsToTry.length - 1})...`, 0, null, null, null);
|
||||
}
|
||||
// Clean up partial download from previous attempt
|
||||
if (fs.existsSync(targetPath)) {
|
||||
try { fs.unlinkSync(targetPath); } catch (e) {}
|
||||
}
|
||||
}
|
||||
if (allowRetry) {
|
||||
await retryDownload(url, targetPath, progressCallback);
|
||||
} else {
|
||||
await downloadFile(url, targetPath, progressCallback);
|
||||
}
|
||||
lastError = null;
|
||||
break; // Success
|
||||
} catch (error) {
|
||||
lastError = error;
|
||||
const isConnectionError = error.message && (
|
||||
error.message.includes('ETIMEDOUT') ||
|
||||
error.message.includes('ECONNREFUSED') ||
|
||||
error.message.includes('ECONNABORTED') ||
|
||||
error.message.includes('timeout')
|
||||
);
|
||||
if (isConnectionError && i < urlsToTry.length - 1) {
|
||||
console.warn(`[Download] Connection failed (${error.message}), will try mirror...`);
|
||||
continue;
|
||||
}
|
||||
// Non-connection error or last mirror — throw
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (lastError) {
|
||||
const enhancedError = new Error(`Archive download failed: ${lastError.message}`);
|
||||
enhancedError.originalError = lastError;
|
||||
enhancedError.downloadUrl = downloadUrl;
|
||||
enhancedError.targetPath = targetPath;
|
||||
throw enhancedError;
|
||||
@@ -103,13 +150,13 @@ async function deployGameArchive(archivePath, destinationDir, toolsDir, progress
|
||||
if (error) {
|
||||
const cleanStderr = stderr.replace(/[\u2714\u2716\u2713\u2717\u26A0\uD83D[\uDC00-\uDFFF]]/g, '').trim();
|
||||
const cleanStdout = stdout.replace(/[\u2714\u2716\u2713\u2717\u26A0\uD83D[\uDC00-\uDFFF]]/g, '').trim();
|
||||
|
||||
|
||||
if (cleanStderr) console.error('Deployment stderr:', cleanStderr);
|
||||
if (cleanStdout) console.error('Deployment stdout:', cleanStdout);
|
||||
|
||||
|
||||
const errorText = (stderr + ' ' + error.message).toLowerCase();
|
||||
let message = 'Game deployment failed';
|
||||
|
||||
|
||||
if (errorText.includes('unexpected eof')) {
|
||||
message = 'Corrupted archive detected. Please retry download.';
|
||||
if (fs.existsSync(archivePath)) {
|
||||
@@ -156,20 +203,20 @@ async function performIntelligentUpdate(targetVersion, branch = 'release', progr
|
||||
console.log(`Initiating intelligent update to version ${targetVersion}`);
|
||||
|
||||
const currentVersion = getInstalledClientVersion();
|
||||
console.log(`Current version: ${currentVersion || 'none (clean install)'}`);
|
||||
console.log(`Target version: ${targetVersion}`);
|
||||
console.log(`Branch: ${branch}`);
|
||||
const currentBuild = extractVersionNumber(currentVersion) || 0;
|
||||
const targetBuild = extractVersionNumber(targetVersion);
|
||||
console.log(`Current build: ${currentBuild}, Target build: ${targetBuild}, Branch: ${branch}`);
|
||||
|
||||
// For non-release branches, always do full install
|
||||
if (branch !== 'release') {
|
||||
console.log(`Pre-release branch detected - forcing full archive download`);
|
||||
console.log('Pre-release branch detected - forcing full archive download');
|
||||
const versionDetails = await extractVersionDetails(targetVersion, branch);
|
||||
const archiveName = path.basename(versionDetails.fullUrl);
|
||||
const archivePath = path.join(cacheDir, `${branch}_${archiveName}`);
|
||||
|
||||
const archivePath = path.join(cacheDir, `${branch}_0_to_${targetBuild}.pwr`);
|
||||
|
||||
if (progressCallback) {
|
||||
progressCallback('Downloading full game archive (pre-release)...', 0, null, null, null);
|
||||
}
|
||||
|
||||
|
||||
await acquireGameArchive(versionDetails.fullUrl, archivePath, null, progressCallback);
|
||||
await deployGameArchive(archivePath, gameDir, toolsDir, progressCallback, false);
|
||||
saveVersionClient(targetVersion);
|
||||
@@ -177,16 +224,16 @@ async function performIntelligentUpdate(targetVersion, branch = 'release', progr
|
||||
return;
|
||||
}
|
||||
|
||||
if (!currentVersion) {
|
||||
// Clean install (no current version)
|
||||
if (currentBuild === 0) {
|
||||
console.log('No existing installation detected - downloading full archive');
|
||||
const versionDetails = await extractVersionDetails(targetVersion, branch);
|
||||
const archiveName = path.basename(versionDetails.fullUrl);
|
||||
const archivePath = path.join(cacheDir, `${branch}_${archiveName}`);
|
||||
|
||||
const archivePath = path.join(cacheDir, `${branch}_0_to_${targetBuild}.pwr`);
|
||||
|
||||
if (progressCallback) {
|
||||
progressCallback(`Downloading full game archive (first install - v${targetVersion})...`, 0, null, null, null);
|
||||
progressCallback(`Downloading full game archive (first install - v${targetBuild})...`, 0, null, null, null);
|
||||
}
|
||||
|
||||
|
||||
await acquireGameArchive(versionDetails.fullUrl, archivePath, null, progressCallback);
|
||||
await deployGameArchive(archivePath, gameDir, toolsDir, progressCallback, false);
|
||||
saveVersionClient(targetVersion);
|
||||
@@ -194,59 +241,67 @@ async function performIntelligentUpdate(targetVersion, branch = 'release', progr
|
||||
return;
|
||||
}
|
||||
|
||||
const patchesToApply = needsIntermediatePatches(currentVersion, targetVersion);
|
||||
|
||||
if (patchesToApply.length === 0) {
|
||||
console.log('Already at target version or invalid version sequence');
|
||||
// Already at target
|
||||
if (currentBuild >= targetBuild) {
|
||||
console.log('Already at target version or newer');
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`Applying ${patchesToApply.length} differential patch(es): ${patchesToApply.join(' -> ')}`);
|
||||
// Use mirror's update plan for optimal patch routing
|
||||
try {
|
||||
const plan = await getUpdatePlan(currentBuild, targetBuild, branch);
|
||||
|
||||
console.log(`Applying ${plan.steps.length} patch(es): ${plan.steps.map(s => `${s.from}\u2192${s.to}`).join(' + ')}`);
|
||||
|
||||
for (let i = 0; i < plan.steps.length; i++) {
|
||||
const step = plan.steps[i];
|
||||
const stepName = `${step.from}_to_${step.to}`;
|
||||
const archivePath = path.join(cacheDir, `${branch}_${stepName}.pwr`);
|
||||
const isDifferential = step.from !== 0;
|
||||
|
||||
for (let i = 0; i < patchesToApply.length; i++) {
|
||||
const patchVersion = patchesToApply[i];
|
||||
const versionDetails = await extractVersionDetails(patchVersion, branch);
|
||||
|
||||
const canDifferential = canUseDifferentialUpdate(getInstalledClientVersion(), versionDetails);
|
||||
|
||||
if (!canDifferential || !versionDetails.differentialUrl) {
|
||||
console.log(`WARNING: Differential patch not available for ${patchVersion}, using full archive`);
|
||||
const archiveName = path.basename(versionDetails.fullUrl);
|
||||
const archivePath = path.join(cacheDir, `${branch}_${archiveName}`);
|
||||
|
||||
if (progressCallback) {
|
||||
progressCallback(`Downloading full archive for ${patchVersion} (${i + 1}/${patchesToApply.length})...`, 0, null, null, null);
|
||||
progressCallback(`Downloading patch ${i + 1}/${plan.steps.length}: ${stepName}...`, 0, null, null, null);
|
||||
}
|
||||
|
||||
await acquireGameArchive(versionDetails.fullUrl, archivePath, null, progressCallback);
|
||||
await deployGameArchive(archivePath, gameDir, toolsDir, progressCallback, false);
|
||||
} else {
|
||||
console.log(`Applying differential patch: ${versionDetails.sourceVersion} -> ${patchVersion}`);
|
||||
const archiveName = path.basename(versionDetails.differentialUrl);
|
||||
const archivePath = path.join(cacheDir, `${branch}_patch_${archiveName}`);
|
||||
|
||||
|
||||
await acquireGameArchive(step.url, archivePath, null, progressCallback);
|
||||
|
||||
if (progressCallback) {
|
||||
progressCallback(`Applying patch ${i + 1}/${patchesToApply.length}: ${patchVersion}...`, 0, null, null, null);
|
||||
progressCallback(`Applying patch ${i + 1}/${plan.steps.length}: ${stepName}...`, 50, null, null, null);
|
||||
}
|
||||
|
||||
await acquireGameArchive(versionDetails.differentialUrl, archivePath, versionDetails.checksum, progressCallback);
|
||||
await deployGameArchive(archivePath, gameDir, toolsDir, progressCallback, true);
|
||||
|
||||
|
||||
await deployGameArchive(archivePath, gameDir, toolsDir, progressCallback, isDifferential);
|
||||
|
||||
// Clean up patch file
|
||||
if (fs.existsSync(archivePath)) {
|
||||
try {
|
||||
fs.unlinkSync(archivePath);
|
||||
console.log(`Cleaned up patch file: ${archiveName}`);
|
||||
console.log(`Cleaned up: ${stepName}.pwr`);
|
||||
} catch (cleanupErr) {
|
||||
console.warn(`Failed to cleanup patch file: ${cleanupErr.message}`);
|
||||
console.warn(`Failed to cleanup: ${cleanupErr.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
saveVersionClient(patchVersion);
|
||||
console.log(`Patch ${patchVersion} applied successfully (${i + 1}/${patchesToApply.length})`);
|
||||
}
|
||||
|
||||
console.log(`Update completed successfully. Version ${targetVersion} is now installed.`);
|
||||
saveVersionClient(`v${step.to}`);
|
||||
console.log(`Patch ${stepName} applied (${i + 1}/${plan.steps.length})`);
|
||||
}
|
||||
|
||||
console.log(`Update completed. Version ${targetVersion} is now installed.`);
|
||||
} catch (planError) {
|
||||
console.error('Update plan failed:', planError.message);
|
||||
console.log('Falling back to full archive download');
|
||||
|
||||
// Fallback: full install
|
||||
const versionDetails = await extractVersionDetails(targetVersion, branch);
|
||||
const archivePath = path.join(cacheDir, `${branch}_0_to_${targetBuild}.pwr`);
|
||||
|
||||
if (progressCallback) {
|
||||
progressCallback(`Downloading full game archive (fallback)...`, 0, null, null, null);
|
||||
}
|
||||
|
||||
await acquireGameArchive(versionDetails.fullUrl, archivePath, null, progressCallback);
|
||||
await deployGameArchive(archivePath, gameDir, toolsDir, progressCallback, false);
|
||||
saveVersionClient(targetVersion);
|
||||
}
|
||||
}
|
||||
|
||||
async function ensureGameInstalled(targetVersion, branch = 'release', progressCallback, gameDir = GAME_DIR, cacheDir = CACHE_DIR, toolsDir = TOOLS_DIR) {
|
||||
|
||||
@@ -61,12 +61,39 @@ async function fetchAuthTokens(uuid, name) {
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
console.log('Auth tokens received from server');
|
||||
const identityToken = data.IdentityToken || data.identityToken;
|
||||
const sessionToken = data.SessionToken || data.sessionToken;
|
||||
|
||||
return {
|
||||
identityToken: data.IdentityToken || data.identityToken,
|
||||
sessionToken: data.SessionToken || data.sessionToken
|
||||
};
|
||||
// Verify the identity token has the correct username
|
||||
// This catches cases where the auth server defaults to "Player"
|
||||
try {
|
||||
const parts = identityToken.split('.');
|
||||
if (parts.length >= 2) {
|
||||
const payload = JSON.parse(Buffer.from(parts[1], 'base64url').toString());
|
||||
if (payload.username && payload.username !== name && name !== 'Player') {
|
||||
console.warn(`[Auth] Token username mismatch: token has "${payload.username}", expected "${name}". Retrying...`);
|
||||
// Retry once with explicit name
|
||||
const retryResponse = await fetch(`${authServerUrl}/game-session/child`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ uuid: uuid, name: name, scopes: ['hytale:server', 'hytale:client'] })
|
||||
});
|
||||
if (retryResponse.ok) {
|
||||
const retryData = await retryResponse.json();
|
||||
console.log('[Auth] Retry successful');
|
||||
return {
|
||||
identityToken: retryData.IdentityToken || retryData.identityToken,
|
||||
sessionToken: retryData.SessionToken || retryData.sessionToken
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (verifyErr) {
|
||||
console.warn('[Auth] Token verification skipped:', verifyErr.message);
|
||||
}
|
||||
|
||||
console.log('Auth tokens received from server');
|
||||
return { identityToken, sessionToken };
|
||||
} catch (error) {
|
||||
console.error('Failed to fetch auth tokens:', error.message);
|
||||
// Fallback to local generation if server unavailable
|
||||
@@ -223,6 +250,7 @@ async function launchGame(playerNameOverride = null, progressCallback, javaPathO
|
||||
}
|
||||
|
||||
const uuid = getUuidForUser(playerName);
|
||||
console.log(`[Launcher] UUID for "${playerName}": ${uuid} (verify this stays constant across launches)`);
|
||||
|
||||
// Fetch tokens from auth server
|
||||
if (progressCallback) {
|
||||
@@ -412,7 +440,7 @@ exec "$REAL_JAVA" "\${ARGS[@]}"
|
||||
// This enables runtime auth patching without modifying the server JAR
|
||||
const agentJar = path.join(gameLatest, 'Server', 'dualauth-agent.jar');
|
||||
if (fs.existsSync(agentJar)) {
|
||||
const agentFlag = `-javaagent:${agentJar}`;
|
||||
const agentFlag = `-javaagent:"${agentJar}"`;
|
||||
env.JAVA_TOOL_OPTIONS = env.JAVA_TOOL_OPTIONS
|
||||
? `${env.JAVA_TOOL_OPTIONS} ${agentFlag}`
|
||||
: agentFlag;
|
||||
|
||||
@@ -5,7 +5,7 @@ const { promisify } = require('util');
|
||||
const { getResolvedAppDir, findClientPath, findUserDataPath, findUserDataRecursive, GAME_DIR, CACHE_DIR, TOOLS_DIR } = require('../core/paths');
|
||||
const { getOS, getArch } = require('../utils/platformUtils');
|
||||
const { downloadFile, retryDownload, retryStalledDownload, MAX_AUTOMATIC_STALL_RETRIES } = require('../utils/fileManager');
|
||||
const { getLatestClientVersion, getInstalledClientVersion } = require('../services/versionManager');
|
||||
const { getLatestClientVersion, getInstalledClientVersion, getUpdatePlan, extractVersionNumber } = require('../services/versionManager');
|
||||
const { FORCE_CLEAN_INSTALL_VERSION, CLEAN_INSTALL_TEST_VERSION } = require('../core/testConfig');
|
||||
const { installButler } = require('./butlerManager');
|
||||
const { downloadAndReplaceHomePageUI, downloadAndReplaceLogo } = require('./uiFileManager');
|
||||
@@ -64,7 +64,7 @@ async function safeRemoveDirectory(dirPath, maxRetries = 3) {
|
||||
}
|
||||
}
|
||||
|
||||
async function downloadPWR(branch = 'release', fileName = 'v8', progressCallback, cacheDir = CACHE_DIR, manualRetry = false) {
|
||||
async function downloadPWR(branch = 'release', fileName = 'v8', progressCallback, cacheDir = CACHE_DIR, manualRetry = false, directUrl = null, expectedSize = null) {
|
||||
const osName = getOS();
|
||||
const arch = getArch();
|
||||
|
||||
@@ -72,43 +72,69 @@ async function downloadPWR(branch = 'release', fileName = 'v8', progressCallback
|
||||
throw new Error('Hytale x86_64 Intel Mac Support has not been released yet. Please check back later.');
|
||||
}
|
||||
|
||||
const { getPWRUrlFromNewAPI } = require('../services/versionManager');
|
||||
|
||||
let url;
|
||||
let isUsingNewAPI = false;
|
||||
|
||||
try {
|
||||
console.log(`[DownloadPWR] Fetching URL from new API for branch: ${branch}, version: ${fileName}`);
|
||||
url = await getPWRUrlFromNewAPI(branch, fileName);
|
||||
isUsingNewAPI = true;
|
||||
console.log(`[DownloadPWR] Using new API URL: ${url}`);
|
||||
} catch (error) {
|
||||
console.error(`[DownloadPWR] Failed to get URL from new API: ${error.message}`);
|
||||
console.log(`[DownloadPWR] Falling back to old URL format`);
|
||||
url = `https://game-patches.hytale.com/patches/${osName}/${arch}/${branch}/0/${fileName}.pwr`;
|
||||
|
||||
if (directUrl) {
|
||||
url = directUrl;
|
||||
console.log(`[DownloadPWR] Using direct URL: ${url}`);
|
||||
} else {
|
||||
const { getPWRUrl } = require('../services/versionManager');
|
||||
try {
|
||||
console.log(`[DownloadPWR] Fetching mirror URL for branch: ${branch}, version: ${fileName}`);
|
||||
url = await getPWRUrl(branch, fileName);
|
||||
console.log(`[DownloadPWR] Mirror URL: ${url}`);
|
||||
} catch (error) {
|
||||
console.error(`[DownloadPWR] Failed to get mirror URL: ${error.message}`);
|
||||
const { getPatchesBaseUrl } = require('../services/versionManager');
|
||||
const baseUrl = await getPatchesBaseUrl();
|
||||
url = `${baseUrl}/${osName}/${arch}/${branch}/0_to_${extractVersionNumber(fileName)}.pwr`;
|
||||
console.log(`[DownloadPWR] Fallback URL: ${url}`);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Look up expected file size from manifest if not provided
|
||||
if (!expectedSize) {
|
||||
try {
|
||||
const { fetchMirrorManifest } = require('../services/versionManager');
|
||||
const manifest = await fetchMirrorManifest();
|
||||
// Try to match: "0_to_11" format or "v11" format
|
||||
const versionMatch = fileName.match(/^(\d+)_to_(\d+)$/);
|
||||
let manifestKey;
|
||||
if (versionMatch) {
|
||||
manifestKey = `${osName}/${arch}/${branch}/${fileName}.pwr`;
|
||||
} else {
|
||||
const buildNum = extractVersionNumber(fileName);
|
||||
manifestKey = `${osName}/${arch}/${branch}/0_to_${buildNum}.pwr`;
|
||||
}
|
||||
if (manifest.files[manifestKey]) {
|
||||
expectedSize = manifest.files[manifestKey].size;
|
||||
console.log(`[PWR] Expected size from manifest: ${(expectedSize / 1024 / 1024).toFixed(2)} MB`);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log(`[PWR] Could not fetch expected size from manifest: ${e.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
const dest = path.join(cacheDir, `${branch}_${fileName}.pwr`);
|
||||
|
||||
// Check if file exists and validate it
|
||||
if (fs.existsSync(dest) && !manualRetry) {
|
||||
console.log('PWR file found in cache:', dest);
|
||||
|
||||
// Validate file size (PWR files should be > 1MB and >= 1.5GB for complete downloads)
|
||||
const stats = fs.statSync(dest);
|
||||
if (stats.size < 1024 * 1024) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check if file is under 1.5 GB (incomplete download)
|
||||
const sizeInMB = stats.size / 1024 / 1024;
|
||||
if (sizeInMB < 1500) {
|
||||
console.log(`[PWR Validation] File appears incomplete: ${sizeInMB.toFixed(2)} MB < 1.5 GB`);
|
||||
return false;
|
||||
if (stats.size > 1024 * 1024) {
|
||||
// Validate against expected size - reject if file is truncated (< 99% of expected)
|
||||
if (expectedSize && stats.size < expectedSize * 0.99) {
|
||||
console.log(`[PWR] Cached file truncated: ${(stats.size / 1024 / 1024).toFixed(2)} MB, expected ${(expectedSize / 1024 / 1024).toFixed(2)} MB. Deleting and re-downloading.`);
|
||||
fs.unlinkSync(dest);
|
||||
} else {
|
||||
console.log(`[PWR] Using cached file: ${dest} (${(stats.size / 1024 / 1024).toFixed(2)} MB)`);
|
||||
return dest;
|
||||
}
|
||||
} else {
|
||||
console.log(`[PWR] Cached file too small (${stats.size} bytes), re-downloading`);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`Fetching PWR patch file from ${isUsingNewAPI ? 'NEW API' : 'old API'}:`, url);
|
||||
console.log(`[DownloadPWR] Downloading from: ${url}`);
|
||||
|
||||
try {
|
||||
if (manualRetry) {
|
||||
@@ -134,7 +160,7 @@ async function downloadPWR(branch = 'release', fileName = 'v8', progressCallback
|
||||
const retryStats = fs.statSync(dest);
|
||||
console.log(`PWR file downloaded (auto-retry), size: ${(retryStats.size / 1024 / 1024).toFixed(2)} MB`);
|
||||
|
||||
if (!validatePWRFile(dest)) {
|
||||
if (!validatePWRFile(dest, expectedSize)) {
|
||||
console.log(`[PWR Validation] PWR file validation failed after auto-retry, deleting corrupted file: ${dest}`);
|
||||
fs.unlinkSync(dest);
|
||||
throw new Error('Downloaded PWR file is corrupted or invalid after automatic retry. Please retry manually');
|
||||
@@ -184,8 +210,8 @@ async function downloadPWR(branch = 'release', fileName = 'v8', progressCallback
|
||||
// Enhanced PWR file validation
|
||||
const stats = fs.statSync(dest);
|
||||
console.log(`PWR file downloaded, size: ${(stats.size / 1024 / 1024).toFixed(2)} MB`);
|
||||
|
||||
if (!validatePWRFile(dest)) {
|
||||
|
||||
if (!validatePWRFile(dest, expectedSize)) {
|
||||
console.log(`[PWR Validation] PWR file validation failed, deleting corrupted file: ${dest}`);
|
||||
fs.unlinkSync(dest);
|
||||
throw new Error('Downloaded PWR file is corrupted or invalid. Please retry');
|
||||
@@ -203,7 +229,7 @@ async function retryPWRDownload(branch, fileName, progressCallback, cacheDir = C
|
||||
return await downloadPWR(branch, fileName, progressCallback, cacheDir, true);
|
||||
}
|
||||
|
||||
async function applyPWR(pwrFile, progressCallback, gameDir = GAME_DIR, toolsDir = TOOLS_DIR, branch = 'release', cacheDir = CACHE_DIR) {
|
||||
async function applyPWR(pwrFile, progressCallback, gameDir = GAME_DIR, toolsDir = TOOLS_DIR, branch = 'release', cacheDir = CACHE_DIR, skipExistingCheck = false) {
|
||||
console.log(`[Butler] Starting PWR application with:`);
|
||||
console.log(`[Butler] - PWR file: ${pwrFile}`);
|
||||
console.log(`[Butler] - Staging dir: ${path.join(gameDir, 'staging-temp')}`);
|
||||
@@ -227,11 +253,12 @@ async function applyPWR(pwrFile, progressCallback, gameDir = GAME_DIR, toolsDir
|
||||
const gameLatest = gameDir;
|
||||
const stagingDir = path.join(gameLatest, 'staging-temp');
|
||||
|
||||
const clientPath = findClientPath(gameLatest);
|
||||
|
||||
if (clientPath) {
|
||||
console.log('Game files detected, skipping patch installation.');
|
||||
return;
|
||||
if (!skipExistingCheck) {
|
||||
const clientPath = findClientPath(gameLatest);
|
||||
if (clientPath) {
|
||||
console.log('Game files detected, skipping patch installation.');
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Validate and prepare directories
|
||||
@@ -412,57 +439,118 @@ async function updateGameFiles(newVersion, progressCallback, gameDir = GAME_DIR,
|
||||
}
|
||||
console.log(`Updating game files to version: ${newVersion} (branch: ${branch})`);
|
||||
|
||||
tempUpdateDir = path.join(gameDir, '..', 'temp_update');
|
||||
// Determine update strategy: intermediate patches vs full reinstall
|
||||
const currentVersion = loadVersionClient();
|
||||
const currentBuild = extractVersionNumber(currentVersion) || 0;
|
||||
const targetBuild = extractVersionNumber(newVersion);
|
||||
|
||||
if (fs.existsSync(tempUpdateDir)) {
|
||||
fs.rmSync(tempUpdateDir, { recursive: true, force: true });
|
||||
}
|
||||
fs.mkdirSync(tempUpdateDir, { recursive: true });
|
||||
let useIntermediatePatches = false;
|
||||
let updatePlan = null;
|
||||
|
||||
if (progressCallback) {
|
||||
progressCallback('Downloading new game version...', 20, null, null, null);
|
||||
}
|
||||
|
||||
const pwrFile = await downloadPWR(branch, newVersion, progressCallback, cacheDir);
|
||||
|
||||
if (progressCallback) {
|
||||
progressCallback('Extracting new files...', 60, null, null, null);
|
||||
}
|
||||
|
||||
await applyPWR(pwrFile, progressCallback, tempUpdateDir, toolsDir, branch, cacheDir);
|
||||
// Delete PWR file from cache after successful update
|
||||
try {
|
||||
if (fs.existsSync(pwrFile)) {
|
||||
fs.unlinkSync(pwrFile);
|
||||
console.log('[UpdateGameFiles] PWR file deleted from cache after successful update:', pwrFile);
|
||||
if (currentBuild > 0 && currentBuild < targetBuild) {
|
||||
try {
|
||||
updatePlan = await getUpdatePlan(currentBuild, targetBuild, branch);
|
||||
useIntermediatePatches = !updatePlan.isFullInstall;
|
||||
if (useIntermediatePatches) {
|
||||
const totalMB = (updatePlan.totalSize / 1024 / 1024).toFixed(0);
|
||||
console.log(`[UpdateGameFiles] Using intermediate patches: ${updatePlan.steps.map(s => `${s.from}\u2192${s.to}`).join(' + ')} (${totalMB} MB)`);
|
||||
}
|
||||
} catch (planError) {
|
||||
console.warn('[UpdateGameFiles] Could not get update plan, falling back to full install:', planError.message);
|
||||
}
|
||||
} catch (delErr) {
|
||||
console.warn('[UpdateGameFiles] Failed to delete PWR file from cache:', delErr.message);
|
||||
}
|
||||
if (progressCallback) {
|
||||
progressCallback('Replacing game files...', 80, null, null, null);
|
||||
}
|
||||
|
||||
if (fs.existsSync(gameDir)) {
|
||||
console.log('Removing old game files...');
|
||||
let retries = 3;
|
||||
while (retries > 0) {
|
||||
if (useIntermediatePatches && updatePlan) {
|
||||
// Apply intermediate patches directly to game dir
|
||||
for (let i = 0; i < updatePlan.steps.length; i++) {
|
||||
const step = updatePlan.steps[i];
|
||||
const stepName = `${step.from}_to_${step.to}`;
|
||||
|
||||
if (progressCallback) {
|
||||
const progress = 20 + Math.round((i / updatePlan.steps.length) * 60);
|
||||
progressCallback(`Downloading patch ${i + 1}/${updatePlan.steps.length} (${stepName})...`, progress, null, null, null);
|
||||
}
|
||||
|
||||
const pwrFile = await downloadPWR(branch, stepName, progressCallback, cacheDir, false, step.url, step.size);
|
||||
|
||||
if (!pwrFile) {
|
||||
throw new Error(`Failed to download patch ${stepName}`);
|
||||
}
|
||||
|
||||
if (progressCallback) {
|
||||
progressCallback(`Applying patch ${i + 1}/${updatePlan.steps.length} (${stepName})...`, null, null, null, null);
|
||||
}
|
||||
|
||||
await applyPWR(pwrFile, progressCallback, gameDir, toolsDir, branch, cacheDir, true);
|
||||
|
||||
// Clean up PWR file from cache
|
||||
try {
|
||||
fs.rmSync(gameDir, { recursive: true, force: true });
|
||||
break;
|
||||
} catch (err) {
|
||||
if ((err.code === 'EPERM' || err.code === 'EBUSY') && retries > 0) {
|
||||
retries--;
|
||||
console.log(`[UpdateGameFiles] Removal failed with ${err.code}, retrying in 1s... (${retries} retries left)`);
|
||||
await new Promise(resolve => setTimeout(resolve, 1000));
|
||||
} else {
|
||||
throw err;
|
||||
if (fs.existsSync(pwrFile)) {
|
||||
fs.unlinkSync(pwrFile);
|
||||
}
|
||||
} catch (delErr) {
|
||||
console.warn('[UpdateGameFiles] Failed to delete PWR from cache:', delErr.message);
|
||||
}
|
||||
|
||||
// Save intermediate version so we can resume if interrupted
|
||||
saveVersionClient(`v${step.to}`);
|
||||
console.log(`[UpdateGameFiles] Applied patch ${stepName} (${i + 1}/${updatePlan.steps.length})`);
|
||||
}
|
||||
} else {
|
||||
// Full install: download 0->target, apply to temp dir, swap
|
||||
tempUpdateDir = path.join(gameDir, '..', 'temp_update');
|
||||
|
||||
if (fs.existsSync(tempUpdateDir)) {
|
||||
fs.rmSync(tempUpdateDir, { recursive: true, force: true });
|
||||
}
|
||||
fs.mkdirSync(tempUpdateDir, { recursive: true });
|
||||
|
||||
if (progressCallback) {
|
||||
progressCallback('Downloading new game version...', 20, null, null, null);
|
||||
}
|
||||
|
||||
const pwrFile = await downloadPWR(branch, newVersion, progressCallback, cacheDir);
|
||||
|
||||
if (progressCallback) {
|
||||
progressCallback('Extracting new files...', 60, null, null, null);
|
||||
}
|
||||
|
||||
await applyPWR(pwrFile, progressCallback, tempUpdateDir, toolsDir, branch, cacheDir);
|
||||
|
||||
try {
|
||||
if (fs.existsSync(pwrFile)) {
|
||||
fs.unlinkSync(pwrFile);
|
||||
console.log('[UpdateGameFiles] PWR file deleted from cache after successful update:', pwrFile);
|
||||
}
|
||||
} catch (delErr) {
|
||||
console.warn('[UpdateGameFiles] Failed to delete PWR file from cache:', delErr.message);
|
||||
}
|
||||
|
||||
if (progressCallback) {
|
||||
progressCallback('Replacing game files...', 80, null, null, null);
|
||||
}
|
||||
|
||||
if (fs.existsSync(gameDir)) {
|
||||
console.log('Removing old game files...');
|
||||
let retries = 3;
|
||||
while (retries > 0) {
|
||||
try {
|
||||
fs.rmSync(gameDir, { recursive: true, force: true });
|
||||
break;
|
||||
} catch (err) {
|
||||
if ((err.code === 'EPERM' || err.code === 'EBUSY') && retries > 0) {
|
||||
retries--;
|
||||
console.log(`[UpdateGameFiles] Removal failed with ${err.code}, retrying in 1s... (${retries} retries left)`);
|
||||
await new Promise(resolve => setTimeout(resolve, 1000));
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fs.renameSync(tempUpdateDir, gameDir);
|
||||
fs.renameSync(tempUpdateDir, gameDir);
|
||||
}
|
||||
|
||||
const homeUIResult = await downloadAndReplaceHomePageUI(gameDir, progressCallback);
|
||||
console.log('HomePage.ui update result after update:', homeUIResult);
|
||||
@@ -833,36 +921,30 @@ function validateGameDirectory(gameDir, stagingDir) {
|
||||
}
|
||||
|
||||
// Enhanced PWR file validation
|
||||
function validatePWRFile(filePath) {
|
||||
// Accepts intermediate patches (50+ MB) and full installs (1.5+ GB)
|
||||
function validatePWRFile(filePath, expectedSize = null) {
|
||||
try {
|
||||
if (!fs.existsSync(filePath)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
const stats = fs.statSync(filePath);
|
||||
const sizeInMB = stats.size / 1024 / 1024;
|
||||
|
||||
|
||||
// PWR files should be at least 1 MB
|
||||
if (stats.size < 1024 * 1024) {
|
||||
console.log(`[PWR Validation] File too small: ${sizeInMB.toFixed(2)} MB`);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check if file is under 1.5 GB (incomplete download)
|
||||
if (sizeInMB < 1500) {
|
||||
console.log(`[PWR Validation] File appears incomplete: ${sizeInMB.toFixed(2)} MB < 1.5 GB`);
|
||||
|
||||
// Validate against expected size if known (reject if < 99% of expected)
|
||||
if (expectedSize && stats.size < expectedSize * 0.99) {
|
||||
const expectedMB = expectedSize / 1024 / 1024;
|
||||
console.log(`[PWR Validation] File truncated: ${sizeInMB.toFixed(2)} MB, expected ${expectedMB.toFixed(2)} MB`);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Basic file header validation (PWR files should have specific headers)
|
||||
const buffer = fs.readFileSync(filePath, { start: 0, end: 20 });
|
||||
if (buffer.length < 10) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check for common PWR magic bytes or patterns
|
||||
// This is a basic check - could be enhanced with actual PWR format specification
|
||||
const header = buffer.toString('hex', 0, 10);
|
||||
console.log(`[PWR Validation] File header: ${header}`);
|
||||
|
||||
|
||||
console.log(`[PWR Validation] File size: ${sizeInMB.toFixed(2)} MB - OK`);
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.error(`[PWR Validation] Error:`, error.message);
|
||||
|
||||
@@ -1,287 +1,500 @@
|
||||
const axios = require('axios');
|
||||
const crypto = require('crypto');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { getOS, getArch } = require('../utils/platformUtils');
|
||||
const { smartRequest } = require('../utils/proxyClient');
|
||||
|
||||
const BASE_PATCH_URL = 'https://game-patches.hytale.com/patches';
|
||||
const MANIFEST_API = 'https://files.hytalef2p.com/api/patch_manifest';
|
||||
const NEW_API_URL = 'https://thecute.cloud/ShipOfYarn/api.php';
|
||||
// Patches base URL fetched dynamically via multi-source fallback chain
|
||||
const AUTH_DOMAIN = process.env.HYTALE_AUTH_DOMAIN || 'auth.sanasol.ws';
|
||||
const PATCHES_CONFIG_SOURCES = [
|
||||
{ type: 'http', url: `https://${AUTH_DOMAIN}/api/patches-config`, name: 'primary' },
|
||||
{ type: 'http', url: 'https://htdwnldsan.top/patches-config', name: 'backup-1' },
|
||||
{ type: 'http', url: 'https://dl1.htdwnldsan.top/patches-config', name: 'backup-2' },
|
||||
{ type: 'doh', name: '_patches.htdwnldsan.top', name_label: 'dns-txt' },
|
||||
];
|
||||
const HARDCODED_FALLBACK = 'https://dl.vboro.de/patches';
|
||||
|
||||
let apiCache = null;
|
||||
let apiCacheTime = 0;
|
||||
const API_CACHE_DURATION = 60000; // 1 minute
|
||||
// Alternative mirrors (non-Cloudflare) for regions where CF is blocked
|
||||
const NON_CF_MIRRORS = [
|
||||
'https://dl1.htdwnldsan.top',
|
||||
'https://htdwnldsan.top/patches',
|
||||
];
|
||||
|
||||
async function fetchNewAPI() {
|
||||
const now = Date.now();
|
||||
|
||||
if (apiCache && (now - apiCacheTime) < API_CACHE_DURATION) {
|
||||
console.log('[NewAPI] Using cached API data');
|
||||
return apiCache;
|
||||
// Fallback: latest known build number if manifest is unreachable
|
||||
const FALLBACK_LATEST_BUILD = 11;
|
||||
|
||||
let patchesBaseUrl = null;
|
||||
let patchesConfigTime = 0;
|
||||
const PATCHES_CONFIG_CACHE_DURATION = 300000; // 5 minutes
|
||||
|
||||
let manifestCache = null;
|
||||
let manifestCacheTime = 0;
|
||||
const MANIFEST_CACHE_DURATION = 60000; // 1 minute
|
||||
|
||||
// Disk cache path for patches URL (survives restarts)
|
||||
function getDiskCachePath() {
|
||||
const os = require('os');
|
||||
const home = os.homedir();
|
||||
let appDir;
|
||||
if (process.platform === 'win32') {
|
||||
appDir = path.join(home, 'AppData', 'Local', 'HytaleF2P');
|
||||
} else if (process.platform === 'darwin') {
|
||||
appDir = path.join(home, 'Library', 'Application Support', 'HytaleF2P');
|
||||
} else {
|
||||
appDir = path.join(home, '.hytalef2p');
|
||||
}
|
||||
|
||||
return path.join(appDir, 'patches-url-cache.json');
|
||||
}
|
||||
|
||||
function saveDiskCache(url) {
|
||||
try {
|
||||
console.log('[NewAPI] Fetching from:', NEW_API_URL);
|
||||
const response = await axios.get(NEW_API_URL, {
|
||||
timeout: 15000,
|
||||
headers: {
|
||||
'User-Agent': 'Hytale-F2P-Launcher'
|
||||
const cachePath = getDiskCachePath();
|
||||
const dir = path.dirname(cachePath);
|
||||
if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true });
|
||||
fs.writeFileSync(cachePath, JSON.stringify({ patches_url: url, ts: Date.now() }), 'utf8');
|
||||
} catch (e) {
|
||||
// Non-critical, ignore
|
||||
}
|
||||
}
|
||||
|
||||
function loadDiskCache() {
|
||||
try {
|
||||
const cachePath = getDiskCachePath();
|
||||
if (fs.existsSync(cachePath)) {
|
||||
const data = JSON.parse(fs.readFileSync(cachePath, 'utf8'));
|
||||
if (data && data.patches_url) return data.patches_url;
|
||||
}
|
||||
} catch (e) {
|
||||
// Non-critical, ignore
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch patches URL from a single HTTP config endpoint
|
||||
*/
|
||||
async function fetchFromHttp(url) {
|
||||
const response = await axios.get(url, {
|
||||
timeout: 8000,
|
||||
headers: { 'User-Agent': 'Hytale-F2P-Launcher' }
|
||||
});
|
||||
if (response.data && response.data.patches_url) {
|
||||
return response.data.patches_url.replace(/\/+$/, '');
|
||||
}
|
||||
throw new Error('Invalid response');
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch patches URL from DNS TXT record via DNS-over-HTTPS
|
||||
*/
|
||||
async function fetchFromDoh(recordName) {
|
||||
const dohEndpoints = [
|
||||
{ url: 'https://dns.google/resolve', params: { name: recordName, type: 'TXT' } },
|
||||
{ url: 'https://cloudflare-dns.com/dns-query', params: { name: recordName, type: 'TXT' }, headers: { 'Accept': 'application/dns-json' } },
|
||||
];
|
||||
|
||||
for (const endpoint of dohEndpoints) {
|
||||
try {
|
||||
const response = await axios.get(endpoint.url, {
|
||||
params: endpoint.params,
|
||||
headers: { 'User-Agent': 'Hytale-F2P-Launcher', ...(endpoint.headers || {}) },
|
||||
timeout: 5000
|
||||
});
|
||||
const answers = response.data && response.data.Answer;
|
||||
if (answers && answers.length > 0) {
|
||||
// TXT records are quoted, strip quotes
|
||||
const txt = answers[0].data.replace(/^"|"$/g, '');
|
||||
if (txt.startsWith('http')) return txt.replace(/\/+$/, '');
|
||||
}
|
||||
});
|
||||
|
||||
if (response.data && response.data.hytale) {
|
||||
apiCache = response.data;
|
||||
apiCacheTime = now;
|
||||
console.log('[NewAPI] API data fetched and cached successfully');
|
||||
return response.data;
|
||||
} else {
|
||||
throw new Error('Invalid API response structure');
|
||||
} catch (e) {
|
||||
// Try next DoH endpoint
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('[NewAPI] Error fetching API:', error.message);
|
||||
if (apiCache) {
|
||||
console.log('[NewAPI] Using expired cache due to error');
|
||||
return apiCache;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
throw new Error('All DoH endpoints failed');
|
||||
}
|
||||
|
||||
async function getLatestVersionFromNewAPI(branch = 'release') {
|
||||
try {
|
||||
const apiData = await fetchNewAPI();
|
||||
const osName = getOS();
|
||||
const arch = getArch();
|
||||
|
||||
let osKey = osName;
|
||||
if (osName === 'darwin') {
|
||||
osKey = 'mac';
|
||||
}
|
||||
|
||||
const branchData = apiData.hytale[branch];
|
||||
if (!branchData || !branchData[osKey]) {
|
||||
throw new Error(`No data found for branch: ${branch}, OS: ${osKey}`);
|
||||
}
|
||||
|
||||
const osData = branchData[osKey];
|
||||
|
||||
const versions = Object.keys(osData).filter(key => key.endsWith('.pwr'));
|
||||
|
||||
if (versions.length === 0) {
|
||||
throw new Error(`No .pwr files found for ${osKey}`);
|
||||
}
|
||||
|
||||
const versionNumbers = versions.map(v => {
|
||||
const match = v.match(/v(\d+)/);
|
||||
return match ? parseInt(match[1]) : 0;
|
||||
});
|
||||
|
||||
const latestVersionNumber = Math.max(...versionNumbers);
|
||||
console.log(`[NewAPI] Latest version number: ${latestVersionNumber} for branch ${branch}`);
|
||||
|
||||
return `v${latestVersionNumber}`;
|
||||
} catch (error) {
|
||||
console.error('[NewAPI] Error getting latest version:', error.message);
|
||||
throw error;
|
||||
/**
|
||||
* Fetch patches base URL with hardened multi-source fallback chain:
|
||||
* 1. Memory cache (5 min)
|
||||
* 2. HTTP: auth.sanasol.ws (primary)
|
||||
* 3. HTTP: htdwnldsan.top (backup, different host/domain/registrar)
|
||||
* 4. DNS TXT: _patches.htdwnldsan.top via DoH (different protocol layer)
|
||||
* 5. Disk cache (survives restarts, never expires)
|
||||
* 6. Hardcoded fallback URL (last resort)
|
||||
*/
|
||||
async function getPatchesBaseUrl() {
|
||||
const now = Date.now();
|
||||
|
||||
// 1. Memory cache
|
||||
if (patchesBaseUrl && (now - patchesConfigTime) < PATCHES_CONFIG_CACHE_DURATION) {
|
||||
return patchesBaseUrl;
|
||||
}
|
||||
|
||||
// 2-4. Try all sources: HTTP endpoints first, then DoH
|
||||
for (const source of PATCHES_CONFIG_SOURCES) {
|
||||
try {
|
||||
let url;
|
||||
if (source.type === 'http') {
|
||||
console.log(`[Mirror] Trying ${source.name}: ${source.url}`);
|
||||
url = await fetchFromHttp(source.url);
|
||||
} else if (source.type === 'doh') {
|
||||
console.log(`[Mirror] Trying ${source.name_label}: ${source.name}`);
|
||||
url = await fetchFromDoh(source.name);
|
||||
}
|
||||
if (url) {
|
||||
patchesBaseUrl = url;
|
||||
patchesConfigTime = now;
|
||||
saveDiskCache(url);
|
||||
console.log(`[Mirror] Patches URL (via ${source.name || source.name_label}): ${url}`);
|
||||
return url;
|
||||
}
|
||||
} catch (e) {
|
||||
console.warn(`[Mirror] ${source.name || source.name_label} failed: ${e.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
// 5. Stale memory cache (any age)
|
||||
if (patchesBaseUrl) {
|
||||
console.log('[Mirror] All sources failed, using stale memory cache:', patchesBaseUrl);
|
||||
return patchesBaseUrl;
|
||||
}
|
||||
|
||||
// 6. Disk cache (survives restarts)
|
||||
const diskUrl = loadDiskCache();
|
||||
if (diskUrl) {
|
||||
patchesBaseUrl = diskUrl;
|
||||
console.log('[Mirror] All sources failed, using disk cache:', diskUrl);
|
||||
return diskUrl;
|
||||
}
|
||||
|
||||
// 7. Hardcoded fallback
|
||||
console.warn('[Mirror] All sources + caches exhausted, using hardcoded fallback:', HARDCODED_FALLBACK);
|
||||
patchesBaseUrl = HARDCODED_FALLBACK;
|
||||
return HARDCODED_FALLBACK;
|
||||
}
|
||||
|
||||
async function getPWRUrlFromNewAPI(branch = 'release', version = 'v8') {
|
||||
try {
|
||||
const apiData = await fetchNewAPI();
|
||||
const osName = getOS();
|
||||
const arch = getArch();
|
||||
|
||||
let osKey = osName;
|
||||
if (osName === 'darwin') {
|
||||
osKey = 'mac';
|
||||
}
|
||||
|
||||
let fileName;
|
||||
if (osName === 'windows') {
|
||||
fileName = `${version}-windows-amd64.pwr`;
|
||||
} else if (osName === 'linux') {
|
||||
fileName = `${version}-linux-amd64.pwr`;
|
||||
} else if (osName === 'darwin') {
|
||||
fileName = `${version}-darwin-arm64.pwr`;
|
||||
}
|
||||
|
||||
const branchData = apiData.hytale[branch];
|
||||
if (!branchData || !branchData[osKey]) {
|
||||
throw new Error(`No data found for branch: ${branch}, OS: ${osKey}`);
|
||||
}
|
||||
|
||||
const osData = branchData[osKey];
|
||||
const url = osData[fileName];
|
||||
|
||||
if (!url) {
|
||||
throw new Error(`No URL found for ${fileName}`);
|
||||
}
|
||||
|
||||
console.log(`[NewAPI] URL for ${fileName}: ${url}`);
|
||||
return url;
|
||||
} catch (error) {
|
||||
console.error('[NewAPI] Error getting PWR URL:', error.message);
|
||||
throw error;
|
||||
/**
|
||||
* Get all available mirror base URLs (primary + non-Cloudflare fallbacks)
|
||||
* Used by download logic to retry on different mirrors when primary is blocked
|
||||
*/
|
||||
async function getAllMirrorUrls() {
|
||||
const primary = await getPatchesBaseUrl();
|
||||
// Deduplicate: don't include mirrors that match primary
|
||||
const mirrors = NON_CF_MIRRORS.filter(m => m !== primary);
|
||||
return [primary, ...mirrors];
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch the mirror manifest — tries primary URL first, then non-Cloudflare mirrors
|
||||
*/
|
||||
async function fetchMirrorManifest() {
|
||||
const now = Date.now();
|
||||
|
||||
if (manifestCache && (now - manifestCacheTime) < MANIFEST_CACHE_DURATION) {
|
||||
console.log('[Mirror] Using cached manifest');
|
||||
return manifestCache;
|
||||
}
|
||||
|
||||
const mirrors = await getAllMirrorUrls();
|
||||
|
||||
for (let i = 0; i < mirrors.length; i++) {
|
||||
const baseUrl = mirrors[i];
|
||||
const manifestUrl = `${baseUrl}/manifest.json`;
|
||||
try {
|
||||
console.log(`[Mirror] Fetching manifest from: ${manifestUrl}`);
|
||||
const response = await axios.get(manifestUrl, {
|
||||
timeout: 15000,
|
||||
maxRedirects: 5,
|
||||
headers: { 'User-Agent': 'Hytale-F2P-Launcher' }
|
||||
});
|
||||
|
||||
if (response.data && response.data.files) {
|
||||
manifestCache = response.data;
|
||||
manifestCacheTime = now;
|
||||
// If a non-primary mirror worked, switch to it for downloads too
|
||||
if (i > 0) {
|
||||
console.log(`[Mirror] Primary unreachable, switching to mirror: ${baseUrl}`);
|
||||
patchesBaseUrl = baseUrl;
|
||||
patchesConfigTime = now;
|
||||
saveDiskCache(baseUrl);
|
||||
}
|
||||
console.log('[Mirror] Manifest fetched successfully');
|
||||
return response.data;
|
||||
}
|
||||
throw new Error('Invalid manifest structure');
|
||||
} catch (error) {
|
||||
const isTimeout = error.code === 'ETIMEDOUT' || error.code === 'ECONNABORTED' || error.message.includes('timeout');
|
||||
console.error(`[Mirror] Error fetching manifest from ${baseUrl}: ${error.message}${isTimeout ? ' (Cloudflare may be blocked)' : ''}`);
|
||||
if (i < mirrors.length - 1) {
|
||||
console.log(`[Mirror] Trying next mirror...`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// All mirrors failed — use cached manifest if available
|
||||
if (manifestCache) {
|
||||
console.log('[Mirror] All mirrors failed, using expired cache');
|
||||
return manifestCache;
|
||||
}
|
||||
throw new Error('All mirrors failed and no cached manifest available');
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse manifest to get available patches for current platform
|
||||
* Returns array of { from, to, key, size }
|
||||
*/
|
||||
function getPlatformPatches(manifest, branch = 'release') {
|
||||
const os = getOS();
|
||||
const arch = getArch();
|
||||
const prefix = `${os}/${arch}/${branch}/`;
|
||||
const patches = [];
|
||||
|
||||
for (const [key, info] of Object.entries(manifest.files)) {
|
||||
if (key.startsWith(prefix) && key.endsWith('.pwr')) {
|
||||
const filename = key.slice(prefix.length, -4); // e.g., "0_to_11"
|
||||
const match = filename.match(/^(\d+)_to_(\d+)$/);
|
||||
if (match) {
|
||||
patches.push({
|
||||
from: parseInt(match[1]),
|
||||
to: parseInt(match[2]),
|
||||
key,
|
||||
size: info.size
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return patches;
|
||||
}
|
||||
|
||||
/**
|
||||
* Find optimal patch path using BFS with download size minimization
|
||||
* Returns array of { from, to, url, size, key } steps, or null if no path found
|
||||
*/
|
||||
async function findOptimalPatchPath(currentBuild, targetBuild, patches) {
|
||||
if (currentBuild >= targetBuild) return [];
|
||||
|
||||
const baseUrl = await getPatchesBaseUrl();
|
||||
const edges = {};
|
||||
for (const patch of patches) {
|
||||
if (!edges[patch.from]) edges[patch.from] = [];
|
||||
edges[patch.from].push(patch);
|
||||
}
|
||||
|
||||
const queue = [{ build: currentBuild, path: [], totalSize: 0 }];
|
||||
let bestPath = null;
|
||||
let bestSize = Infinity;
|
||||
|
||||
while (queue.length > 0) {
|
||||
const { build, path, totalSize } = queue.shift();
|
||||
|
||||
if (build === targetBuild) {
|
||||
if (totalSize < bestSize) {
|
||||
bestPath = path;
|
||||
bestSize = totalSize;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
if (totalSize >= bestSize) continue;
|
||||
|
||||
const nextEdges = edges[build] || [];
|
||||
for (const edge of nextEdges) {
|
||||
if (edge.to <= build || edge.to > targetBuild) continue;
|
||||
if (path.some(p => p.to === edge.to)) continue;
|
||||
|
||||
queue.push({
|
||||
build: edge.to,
|
||||
path: [...path, {
|
||||
from: edge.from,
|
||||
to: edge.to,
|
||||
url: `${baseUrl}/${edge.key}`,
|
||||
size: edge.size,
|
||||
key: edge.key
|
||||
}],
|
||||
totalSize: totalSize + edge.size
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return bestPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the optimal update plan from currentBuild to targetBuild
|
||||
* Returns { steps: [{from, to, url, size}], totalSize, isFullInstall }
|
||||
*/
|
||||
async function getUpdatePlan(currentBuild, targetBuild, branch = 'release') {
|
||||
const manifest = await fetchMirrorManifest();
|
||||
const patches = getPlatformPatches(manifest, branch);
|
||||
|
||||
// Try optimal path
|
||||
const steps = await findOptimalPatchPath(currentBuild, targetBuild, patches);
|
||||
|
||||
if (steps && steps.length > 0) {
|
||||
const totalSize = steps.reduce((sum, s) => sum + s.size, 0);
|
||||
console.log(`[Mirror] Update plan: ${steps.map(s => `${s.from}\u2192${s.to}`).join(' + ')} (${(totalSize / 1024 / 1024).toFixed(0)} MB)`);
|
||||
return { steps, totalSize, isFullInstall: steps.length === 1 && steps[0].from === 0 };
|
||||
}
|
||||
|
||||
// Fallback: full install 0 -> target
|
||||
const fullPatch = patches.find(p => p.from === 0 && p.to === targetBuild);
|
||||
if (fullPatch) {
|
||||
const baseUrl = await getPatchesBaseUrl();
|
||||
const step = {
|
||||
from: 0,
|
||||
to: targetBuild,
|
||||
url: `${baseUrl}/${fullPatch.key}`,
|
||||
size: fullPatch.size,
|
||||
key: fullPatch.key
|
||||
};
|
||||
console.log(`[Mirror] Full install: 0\u2192${targetBuild} (${(fullPatch.size / 1024 / 1024).toFixed(0)} MB)`);
|
||||
return { steps: [step], totalSize: fullPatch.size, isFullInstall: true };
|
||||
}
|
||||
|
||||
throw new Error(`No patch path found from build ${currentBuild} to ${targetBuild} for ${getOS()}/${getArch()}`);
|
||||
}
|
||||
|
||||
async function getLatestClientVersion(branch = 'release') {
|
||||
try {
|
||||
console.log(`[NewAPI] Fetching latest client version from new API (branch: ${branch})...`);
|
||||
|
||||
// Utiliser la nouvelle API
|
||||
const latestVersion = await getLatestVersionFromNewAPI(branch);
|
||||
console.log(`[NewAPI] Latest client version for ${branch}: ${latestVersion}`);
|
||||
return latestVersion;
|
||||
|
||||
} catch (error) {
|
||||
console.error('[NewAPI] Error fetching client version from new API:', error.message);
|
||||
console.log('[NewAPI] Falling back to old API...');
|
||||
|
||||
// Fallback vers l'ancienne API si la nouvelle échoue
|
||||
try {
|
||||
const response = await smartRequest(`https://files.hytalef2p.com/api/version_client?branch=${branch}`, {
|
||||
timeout: 40000,
|
||||
headers: {
|
||||
'User-Agent': 'Hytale-F2P-Launcher'
|
||||
}
|
||||
});
|
||||
console.log(`[Mirror] Fetching latest client version (branch: ${branch})...`);
|
||||
const manifest = await fetchMirrorManifest();
|
||||
const patches = getPlatformPatches(manifest, branch);
|
||||
|
||||
if (response.data && response.data.client_version) {
|
||||
const version = response.data.client_version;
|
||||
console.log(`Latest client version for ${branch} (old API): ${version}`);
|
||||
return version;
|
||||
} else {
|
||||
console.log('Warning: Invalid API response, falling back to latest known version (v8)');
|
||||
return 'v8';
|
||||
}
|
||||
} catch (fallbackError) {
|
||||
console.error('Error fetching client version from old API:', fallbackError.message);
|
||||
console.log('Warning: Both APIs unavailable, falling back to latest known version (v8)');
|
||||
return 'v8';
|
||||
if (patches.length === 0) {
|
||||
console.log(`[Mirror] No patches for branch '${branch}', using fallback`);
|
||||
return `v${FALLBACK_LATEST_BUILD}`;
|
||||
}
|
||||
|
||||
const latestBuild = Math.max(...patches.map(p => p.to));
|
||||
console.log(`[Mirror] Latest client version: v${latestBuild}`);
|
||||
return `v${latestBuild}`;
|
||||
} catch (error) {
|
||||
console.error('[Mirror] Error:', error.message);
|
||||
return `v${FALLBACK_LATEST_BUILD}`;
|
||||
}
|
||||
}
|
||||
|
||||
// Fonction utilitaire pour extraire le numéro de version
|
||||
// Supporte les formats: "7.pwr", "v8", "v8-windows-amd64.pwr", etc.
|
||||
/**
|
||||
* Get PWR download URL for fresh install (0 -> target)
|
||||
* Backward-compatible with old getPWRUrlFromNewAPI signature
|
||||
* Checks mirror first, then constructs URL for the branch
|
||||
*/
|
||||
async function getPWRUrl(branch = 'release', version = 'v11') {
|
||||
const targetBuild = extractVersionNumber(version);
|
||||
const os = getOS();
|
||||
const arch = getArch();
|
||||
|
||||
try {
|
||||
const manifest = await fetchMirrorManifest();
|
||||
const patches = getPlatformPatches(manifest, branch);
|
||||
const fullPatch = patches.find(p => p.from === 0 && p.to === targetBuild);
|
||||
|
||||
if (fullPatch) {
|
||||
const baseUrl = await getPatchesBaseUrl();
|
||||
const url = `${baseUrl}/${fullPatch.key}`;
|
||||
console.log(`[Mirror] PWR URL: ${url}`);
|
||||
return url;
|
||||
}
|
||||
|
||||
if (patches.length > 0) {
|
||||
// Branch exists in mirror but no full patch for this target - construct URL
|
||||
console.log(`[Mirror] No 0->${targetBuild} patch found, constructing URL`);
|
||||
} else {
|
||||
console.log(`[Mirror] Branch '${branch}' not in mirror, constructing URL`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('[Mirror] Error getting PWR URL:', error.message);
|
||||
}
|
||||
|
||||
// Construct mirror URL (will work if patch was uploaded but manifest is stale)
|
||||
const baseUrl = await getPatchesBaseUrl();
|
||||
return `${baseUrl}/${os}/${arch}/${branch}/0_to_${targetBuild}.pwr`;
|
||||
}
|
||||
|
||||
// Backward-compatible alias
|
||||
const getPWRUrlFromNewAPI = getPWRUrl;
|
||||
|
||||
// Utility function to extract version number
|
||||
// Supports: "7.pwr", "v8", "v8-windows-amd64.pwr", "5_to_10", etc.
|
||||
function extractVersionNumber(version) {
|
||||
if (!version) return 0;
|
||||
|
||||
// Nouveau format: "v8" ou "v8-xxx.pwr"
|
||||
|
||||
// New format: "v8" or "v8-xxx.pwr"
|
||||
const vMatch = version.match(/v(\d+)/);
|
||||
if (vMatch) {
|
||||
return parseInt(vMatch[1]);
|
||||
}
|
||||
|
||||
// Ancien format: "7.pwr"
|
||||
if (vMatch) return parseInt(vMatch[1]);
|
||||
|
||||
// Old format: "7.pwr"
|
||||
const pwrMatch = version.match(/(\d+)\.pwr/);
|
||||
if (pwrMatch) {
|
||||
return parseInt(pwrMatch[1]);
|
||||
}
|
||||
|
||||
// Fallback: essayer de parser directement
|
||||
if (pwrMatch) return parseInt(pwrMatch[1]);
|
||||
|
||||
// Fallback
|
||||
const num = parseInt(version);
|
||||
return isNaN(num) ? 0 : num;
|
||||
}
|
||||
|
||||
function buildArchiveUrl(buildNumber, branch = 'release') {
|
||||
async function buildArchiveUrl(buildNumber, branch = 'release') {
|
||||
const baseUrl = await getPatchesBaseUrl();
|
||||
const os = getOS();
|
||||
const arch = getArch();
|
||||
return `${BASE_PATCH_URL}/${os}/${arch}/${branch}/0/${buildNumber}.pwr`;
|
||||
return `${baseUrl}/${os}/${arch}/${branch}/0_to_${buildNumber}.pwr`;
|
||||
}
|
||||
|
||||
async function checkArchiveExists(buildNumber, branch = 'release') {
|
||||
const url = buildArchiveUrl(buildNumber, branch);
|
||||
const url = await buildArchiveUrl(buildNumber, branch);
|
||||
try {
|
||||
const response = await axios.head(url, { timeout: 10000 });
|
||||
return response.status === 200;
|
||||
} catch (error) {
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async function discoverAvailableVersions(latestKnown, branch = 'release', maxProbe = 50) {
|
||||
const available = [];
|
||||
const latest = extractVersionNumber(latestKnown);
|
||||
|
||||
for (let i = latest; i >= Math.max(1, latest - maxProbe); i--) {
|
||||
const exists = await checkArchiveExists(i, branch);
|
||||
if (exists) {
|
||||
available.push(`${i}.pwr`);
|
||||
}
|
||||
}
|
||||
|
||||
return available;
|
||||
}
|
||||
|
||||
async function fetchPatchManifest(branch = 'release') {
|
||||
async function discoverAvailableVersions(latestKnown, branch = 'release') {
|
||||
try {
|
||||
const os = getOS();
|
||||
const arch = getArch();
|
||||
const response = await smartRequest(`${MANIFEST_API}?branch=${branch}&os=${os}&arch=${arch}`, {
|
||||
timeout: 10000
|
||||
});
|
||||
return response.data.patches || {};
|
||||
} catch (error) {
|
||||
console.error('Failed to fetch patch manifest:', error.message);
|
||||
return {};
|
||||
const manifest = await fetchMirrorManifest();
|
||||
const patches = getPlatformPatches(manifest, branch);
|
||||
const versions = [...new Set(patches.map(p => p.to))].sort((a, b) => b - a);
|
||||
return versions.map(v => `${v}.pwr`);
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
async function extractVersionDetails(targetVersion, branch = 'release') {
|
||||
const buildNumber = extractVersionNumber(targetVersion);
|
||||
const previousBuild = buildNumber - 1;
|
||||
|
||||
const manifest = await fetchPatchManifest(branch);
|
||||
const patchInfo = manifest[buildNumber];
|
||||
|
||||
const fullUrl = await buildArchiveUrl(buildNumber, branch);
|
||||
|
||||
return {
|
||||
version: targetVersion,
|
||||
buildNumber: buildNumber,
|
||||
buildNumber,
|
||||
buildName: `HYTALE-Build-${buildNumber}`,
|
||||
fullUrl: patchInfo?.original_url || buildArchiveUrl(buildNumber, branch),
|
||||
differentialUrl: patchInfo?.patch_url || null,
|
||||
checksum: patchInfo?.patch_hash || null,
|
||||
sourceVersion: patchInfo?.from ? `${patchInfo.from}.pwr` : (previousBuild > 0 ? `${previousBuild}.pwr` : null),
|
||||
isDifferential: !!patchInfo?.proper_patch,
|
||||
releaseNotes: patchInfo?.patch_note || null
|
||||
fullUrl,
|
||||
differentialUrl: null,
|
||||
checksum: null,
|
||||
sourceVersion: null,
|
||||
isDifferential: false,
|
||||
releaseNotes: null
|
||||
};
|
||||
}
|
||||
|
||||
function canUseDifferentialUpdate(currentVersion, targetDetails) {
|
||||
if (!targetDetails) return false;
|
||||
if (!targetDetails.differentialUrl) return false;
|
||||
if (!targetDetails.isDifferential) return false;
|
||||
|
||||
if (!currentVersion) return false;
|
||||
|
||||
const currentBuild = extractVersionNumber(currentVersion);
|
||||
const expectedSource = extractVersionNumber(targetDetails.sourceVersion);
|
||||
|
||||
return currentBuild === expectedSource;
|
||||
function canUseDifferentialUpdate() {
|
||||
// Differential updates are now handled via getUpdatePlan()
|
||||
return false;
|
||||
}
|
||||
|
||||
function needsIntermediatePatches(currentVersion, targetVersion) {
|
||||
if (!currentVersion) return [];
|
||||
|
||||
const current = extractVersionNumber(currentVersion);
|
||||
const target = extractVersionNumber(targetVersion);
|
||||
|
||||
const intermediates = [];
|
||||
for (let i = current + 1; i <= target; i++) {
|
||||
intermediates.push(`${i}.pwr`);
|
||||
}
|
||||
|
||||
return intermediates;
|
||||
if (current >= target) return [];
|
||||
return [targetVersion];
|
||||
}
|
||||
|
||||
async function computeFileChecksum(filePath) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const hash = crypto.createHash('sha256');
|
||||
const stream = fs.createReadStream(filePath);
|
||||
|
||||
stream.on('data', data => hash.update(data));
|
||||
stream.on('end', () => resolve(hash.digest('hex')));
|
||||
stream.on('error', reject);
|
||||
@@ -290,7 +503,6 @@ async function computeFileChecksum(filePath) {
|
||||
|
||||
async function validateChecksum(filePath, expectedChecksum) {
|
||||
if (!expectedChecksum) return true;
|
||||
|
||||
const actualChecksum = await computeFileChecksum(filePath);
|
||||
return actualChecksum === expectedChecksum;
|
||||
}
|
||||
@@ -299,7 +511,7 @@ function getInstalledClientVersion() {
|
||||
try {
|
||||
const { loadVersionClient } = require('../core/config');
|
||||
return loadVersionClient();
|
||||
} catch (err) {
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -315,8 +527,13 @@ module.exports = {
|
||||
computeFileChecksum,
|
||||
validateChecksum,
|
||||
getInstalledClientVersion,
|
||||
fetchNewAPI,
|
||||
getLatestVersionFromNewAPI,
|
||||
fetchMirrorManifest,
|
||||
getPWRUrl,
|
||||
getPWRUrlFromNewAPI,
|
||||
extractVersionNumber
|
||||
getUpdatePlan,
|
||||
extractVersionNumber,
|
||||
getPlatformPatches,
|
||||
findOptimalPatchPath,
|
||||
getPatchesBaseUrl,
|
||||
getAllMirrorUrls
|
||||
};
|
||||
|
||||
@@ -9,7 +9,9 @@ const MAX_DOMAIN_LENGTH = 16;
|
||||
|
||||
// DualAuth ByteBuddy Agent (runtime class transformation, no JAR modification)
|
||||
const DUALAUTH_AGENT_URL = 'https://github.com/sanasol/hytale-auth-server/releases/latest/download/dualauth-agent.jar';
|
||||
const DUALAUTH_AGENT_VERSION_API = 'https://api.github.com/repos/sanasol/hytale-auth-server/releases/latest';
|
||||
const DUALAUTH_AGENT_FILENAME = 'dualauth-agent.jar';
|
||||
const DUALAUTH_AGENT_VERSION_FILE = 'dualauth-agent.version';
|
||||
|
||||
function getTargetDomain() {
|
||||
if (process.env.HYTALE_AUTH_DOMAIN) {
|
||||
@@ -511,30 +513,70 @@ class ClientPatcher {
|
||||
*/
|
||||
async ensureAgentAvailable(serverDir, progressCallback) {
|
||||
const agentPath = this.getAgentPath(serverDir);
|
||||
const versionPath = path.join(serverDir, DUALAUTH_AGENT_VERSION_FILE);
|
||||
|
||||
console.log('=== DualAuth Agent (ByteBuddy) ===');
|
||||
console.log(`Target: ${agentPath}`);
|
||||
|
||||
// Check if agent already exists and is valid
|
||||
// Check local version and whether file exists
|
||||
let localVersion = null;
|
||||
let agentExists = false;
|
||||
if (fs.existsSync(agentPath)) {
|
||||
try {
|
||||
const stats = fs.statSync(agentPath);
|
||||
if (stats.size > 1024) {
|
||||
console.log(`DualAuth Agent present (${(stats.size / 1024).toFixed(0)} KB)`);
|
||||
if (progressCallback) progressCallback('DualAuth Agent ready', 100);
|
||||
return { success: true, agentPath, alreadyExists: true };
|
||||
agentExists = true;
|
||||
if (fs.existsSync(versionPath)) {
|
||||
localVersion = fs.readFileSync(versionPath, 'utf8').trim();
|
||||
}
|
||||
} else {
|
||||
console.log('Agent file appears corrupt, re-downloading...');
|
||||
fs.unlinkSync(agentPath);
|
||||
}
|
||||
// File exists but too small - corrupt, re-download
|
||||
console.log('Agent file appears corrupt, re-downloading...');
|
||||
fs.unlinkSync(agentPath);
|
||||
} catch (e) {
|
||||
console.warn('Could not check agent file:', e.message);
|
||||
}
|
||||
}
|
||||
|
||||
// Check for updates from GitHub
|
||||
let remoteVersion = null;
|
||||
let needsDownload = !agentExists;
|
||||
if (agentExists) {
|
||||
try {
|
||||
if (progressCallback) progressCallback('Checking for agent updates...', 5);
|
||||
const axios = require('axios');
|
||||
const resp = await axios.get(DUALAUTH_AGENT_VERSION_API, {
|
||||
timeout: 5000,
|
||||
headers: { 'Accept': 'application/vnd.github.v3+json' }
|
||||
});
|
||||
remoteVersion = resp.data.tag_name; // e.g. "v1.1.10"
|
||||
if (localVersion && localVersion === remoteVersion) {
|
||||
console.log(`DualAuth Agent up to date (${localVersion})`);
|
||||
if (progressCallback) progressCallback('DualAuth Agent ready', 100);
|
||||
return { success: true, agentPath, alreadyExists: true, version: localVersion };
|
||||
}
|
||||
console.log(`Agent update available: ${localVersion || 'unknown'} → ${remoteVersion}`);
|
||||
needsDownload = true;
|
||||
} catch (e) {
|
||||
// GitHub API failed - use existing agent if available
|
||||
console.warn(`Could not check for updates: ${e.message}`);
|
||||
if (agentExists) {
|
||||
console.log(`Using existing agent (${localVersion || 'unknown version'})`);
|
||||
if (progressCallback) progressCallback('DualAuth Agent ready', 100);
|
||||
return { success: true, agentPath, alreadyExists: true, version: localVersion };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!needsDownload) {
|
||||
if (progressCallback) progressCallback('DualAuth Agent ready', 100);
|
||||
return { success: true, agentPath, alreadyExists: true, version: localVersion };
|
||||
}
|
||||
|
||||
// Download agent from GitHub releases
|
||||
if (progressCallback) progressCallback('Downloading DualAuth Agent...', 20);
|
||||
console.log(`Downloading from: ${DUALAUTH_AGENT_URL}`);
|
||||
const action = agentExists ? 'Updating' : 'Downloading';
|
||||
if (progressCallback) progressCallback(`${action} DualAuth Agent...`, 20);
|
||||
console.log(`${action} from: ${DUALAUTH_AGENT_URL}`);
|
||||
|
||||
try {
|
||||
// Ensure server directory exists
|
||||
@@ -548,7 +590,7 @@ class ClientPatcher {
|
||||
const stream = await smartDownloadStream(DUALAUTH_AGENT_URL, (chunk, downloadedBytes, total) => {
|
||||
if (progressCallback && total) {
|
||||
const percent = 20 + Math.floor((downloadedBytes / total) * 70);
|
||||
progressCallback(`Downloading agent... ${(downloadedBytes / 1024).toFixed(0)} KB`, percent);
|
||||
progressCallback(`${action} agent... ${(downloadedBytes / 1024).toFixed(0)} KB`, percent);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -575,9 +617,13 @@ class ClientPatcher {
|
||||
}
|
||||
fs.renameSync(tmpPath, agentPath);
|
||||
|
||||
console.log(`DualAuth Agent downloaded (${(stats.size / 1024).toFixed(0)} KB)`);
|
||||
// Save version
|
||||
const version = remoteVersion || 'unknown';
|
||||
fs.writeFileSync(versionPath, version, 'utf8');
|
||||
|
||||
console.log(`DualAuth Agent ${agentExists ? 'updated' : 'downloaded'} (${(stats.size / 1024).toFixed(0)} KB, ${version})`);
|
||||
if (progressCallback) progressCallback('DualAuth Agent ready', 100);
|
||||
return { success: true, agentPath };
|
||||
return { success: true, agentPath, updated: agentExists, version };
|
||||
|
||||
} catch (downloadError) {
|
||||
console.error(`Failed to download DualAuth Agent: ${downloadError.message}`);
|
||||
@@ -586,6 +632,11 @@ class ClientPatcher {
|
||||
if (fs.existsSync(tmpPath)) {
|
||||
try { fs.unlinkSync(tmpPath); } catch (e) { /* ignore */ }
|
||||
}
|
||||
// If we had an existing agent, still use it
|
||||
if (agentExists) {
|
||||
console.log('Using existing agent despite update failure');
|
||||
return { success: true, agentPath, alreadyExists: true, version: localVersion };
|
||||
}
|
||||
return { success: false, error: downloadError.message };
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,3 +1,2 @@
|
||||
provider: github
|
||||
owner: amiayweb # Change to your own GitHub username
|
||||
repo: Hytale-F2P
|
||||
provider: generic
|
||||
url: https://git.sanhost.net/sanasol/hytale-f2p/releases/download/latest
|
||||
|
||||
10
main.js
10
main.js
@@ -84,12 +84,12 @@ function setDiscordActivity() {
|
||||
largeImageText: 'Hytale F2P Launcher',
|
||||
buttons: [
|
||||
{
|
||||
label: 'GitHub',
|
||||
url: 'https://github.com/amiayweb/Hytale-F2P'
|
||||
label: 'Download',
|
||||
url: 'https://git.sanhost.net/sanasol/hytale-f2p/releases'
|
||||
},
|
||||
{
|
||||
label: 'Discord',
|
||||
url: 'https://discord.gg/hf2pdc'
|
||||
url: 'https://discord.gg/Fhbb9Yk5WW'
|
||||
}
|
||||
]
|
||||
});
|
||||
@@ -964,8 +964,8 @@ ipcMain.handle('open-external', async (event, url) => {
|
||||
|
||||
ipcMain.handle('open-download-page', async () => {
|
||||
try {
|
||||
// Open GitHub releases page for manual download
|
||||
await shell.openExternal('https://github.com/amiayweb/Hytale-F2P/releases/latest');
|
||||
// Open Forgejo releases page for manual download
|
||||
await shell.openExternal('https://git.sanhost.net/sanasol/hytale-f2p/releases/latest');
|
||||
return { success: true };
|
||||
} catch (error) {
|
||||
console.error('Failed to open download page:', error);
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
{
|
||||
"name": "hytale-f2p-launcher",
|
||||
"version": "2.2.2",
|
||||
"version": "2.3.8",
|
||||
"description": "A modern, cross-platform launcher for Hytale with automatic updates and multi-client support",
|
||||
"homepage": "https://github.com/amiayweb/Hytale-F2P",
|
||||
"homepage": "https://git.sanhost.net/sanasol/hytale-f2p",
|
||||
"main": "main.js",
|
||||
"scripts": {
|
||||
"start": "electron .",
|
||||
@@ -118,9 +118,8 @@
|
||||
"createStartMenuShortcut": true
|
||||
},
|
||||
"publish": {
|
||||
"provider": "github",
|
||||
"owner": "amiayweb",
|
||||
"repo": "Hytale-F2P"
|
||||
"provider": "generic",
|
||||
"url": "https://git.sanhost.net/sanasol/hytale-f2p/releases/download/latest"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
523
test-uuid-persistence.js
Normal file
523
test-uuid-persistence.js
Normal file
@@ -0,0 +1,523 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* UUID Persistence Tests
|
||||
*
|
||||
* Simulates the exact conditions that caused character data loss:
|
||||
* - Config file corruption during updates
|
||||
* - File locks making config temporarily unreadable
|
||||
* - Username re-entry after config wipe
|
||||
*
|
||||
* Run: node test-uuid-persistence.js
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
|
||||
// Use a temp directory so we don't mess with real config
|
||||
const TEST_DIR = path.join(os.tmpdir(), 'hytale-uuid-test-' + Date.now());
|
||||
const CONFIG_FILE = path.join(TEST_DIR, 'config.json');
|
||||
const CONFIG_BACKUP = path.join(TEST_DIR, 'config.json.bak');
|
||||
const CONFIG_TEMP = path.join(TEST_DIR, 'config.json.tmp');
|
||||
const UUID_STORE_FILE = path.join(TEST_DIR, 'uuid-store.json');
|
||||
|
||||
// Track test results
|
||||
let passed = 0;
|
||||
let failed = 0;
|
||||
const failures = [];
|
||||
|
||||
function assert(condition, message) {
|
||||
if (condition) {
|
||||
passed++;
|
||||
console.log(` ✓ ${message}`);
|
||||
} else {
|
||||
failed++;
|
||||
failures.push(message);
|
||||
console.log(` ✗ FAIL: ${message}`);
|
||||
}
|
||||
}
|
||||
|
||||
function assertEqual(actual, expected, message) {
|
||||
if (actual === expected) {
|
||||
passed++;
|
||||
console.log(` ✓ ${message}`);
|
||||
} else {
|
||||
failed++;
|
||||
failures.push(`${message} (expected: ${expected}, got: ${actual})`);
|
||||
console.log(` ✗ FAIL: ${message} (expected: "${expected}", got: "${actual}")`);
|
||||
}
|
||||
}
|
||||
|
||||
function cleanup() {
|
||||
try {
|
||||
if (fs.existsSync(TEST_DIR)) {
|
||||
fs.rmSync(TEST_DIR, { recursive: true });
|
||||
}
|
||||
} catch (e) {}
|
||||
}
|
||||
|
||||
function setup() {
|
||||
cleanup();
|
||||
fs.mkdirSync(TEST_DIR, { recursive: true });
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Inline the config functions so we can override paths
|
||||
// (We can't require config.js directly because it uses hardcoded getAppDir())
|
||||
// ============================================================================
|
||||
|
||||
function validateConfig(config) {
|
||||
if (!config || typeof config !== 'object') return false;
|
||||
if (config.userUuids !== undefined && typeof config.userUuids !== 'object') return false;
|
||||
if (config.username !== undefined && (typeof config.username !== 'string')) return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
function loadConfig() {
|
||||
try {
|
||||
if (fs.existsSync(CONFIG_FILE)) {
|
||||
const data = fs.readFileSync(CONFIG_FILE, 'utf8');
|
||||
if (data.trim()) {
|
||||
const config = JSON.parse(data);
|
||||
if (validateConfig(config)) return config;
|
||||
console.warn('[Config] Primary config invalid structure, trying backup...');
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
console.error('[Config] Failed to load primary config:', err.message);
|
||||
}
|
||||
|
||||
try {
|
||||
if (fs.existsSync(CONFIG_BACKUP)) {
|
||||
const data = fs.readFileSync(CONFIG_BACKUP, 'utf8');
|
||||
if (data.trim()) {
|
||||
const config = JSON.parse(data);
|
||||
if (validateConfig(config)) {
|
||||
console.log('[Config] Recovered from backup successfully');
|
||||
try { fs.writeFileSync(CONFIG_FILE, data, 'utf8'); } catch (e) {}
|
||||
return config;
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (err) {}
|
||||
|
||||
return {};
|
||||
}
|
||||
|
||||
function saveConfig(update) {
|
||||
const maxRetries = 3;
|
||||
for (let attempt = 1; attempt <= maxRetries; attempt++) {
|
||||
try {
|
||||
if (!fs.existsSync(TEST_DIR)) fs.mkdirSync(TEST_DIR, { recursive: true });
|
||||
|
||||
const currentConfig = loadConfig();
|
||||
|
||||
// SAFETY CHECK: refuse to save if file exists but loaded empty
|
||||
if (Object.keys(currentConfig).length === 0 && fs.existsSync(CONFIG_FILE)) {
|
||||
const fileSize = fs.statSync(CONFIG_FILE).size;
|
||||
if (fileSize > 2) {
|
||||
console.error(`[Config] REFUSING to save — loaded empty but file exists (${fileSize} bytes). Retrying...`);
|
||||
const delay = attempt * 50; // shorter delay for tests
|
||||
const start = Date.now();
|
||||
while (Date.now() - start < delay) {}
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
const newConfig = { ...currentConfig, ...update };
|
||||
const data = JSON.stringify(newConfig, null, 2);
|
||||
|
||||
fs.writeFileSync(CONFIG_TEMP, data, 'utf8');
|
||||
const verification = JSON.parse(fs.readFileSync(CONFIG_TEMP, 'utf8'));
|
||||
if (!validateConfig(verification)) throw new Error('Validation failed');
|
||||
|
||||
if (fs.existsSync(CONFIG_FILE)) {
|
||||
try {
|
||||
const currentData = fs.readFileSync(CONFIG_FILE, 'utf8');
|
||||
if (currentData.trim()) fs.writeFileSync(CONFIG_BACKUP, currentData, 'utf8');
|
||||
} catch (e) {}
|
||||
}
|
||||
|
||||
fs.renameSync(CONFIG_TEMP, CONFIG_FILE);
|
||||
return true;
|
||||
} catch (err) {
|
||||
try { if (fs.existsSync(CONFIG_TEMP)) fs.unlinkSync(CONFIG_TEMP); } catch (e) {}
|
||||
if (attempt >= maxRetries) throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function loadUuidStore() {
  // Read and parse the UUID store from disk. Falls back to an empty object
  // when the file is missing, blank, or unparseable.
  try {
    if (!fs.existsSync(UUID_STORE_FILE)) return {};
    const raw = fs.readFileSync(UUID_STORE_FILE, 'utf8');
    if (!raw.trim()) return {};
    return JSON.parse(raw);
  } catch (err) {
    return {};
  }
}
|
||||
|
||||
function saveUuidStore(store) {
  // Persist the UUID map atomically: stage the JSON in a sibling temp file,
  // then rename it over the real store so readers never see a partial write.
  const stagingPath = `${UUID_STORE_FILE}.tmp`;
  const serialized = JSON.stringify(store, null, 2);
  fs.writeFileSync(stagingPath, serialized, 'utf8');
  fs.renameSync(stagingPath, UUID_STORE_FILE);
}
|
||||
|
||||
function migrateUuidStoreIfNeeded() {
  // One-time migration: seed uuid-store.json from config.userUuids the first
  // time we run without a store file. No-op when the store already exists or
  // the config holds no UUIDs.
  if (fs.existsSync(UUID_STORE_FILE)) return;
  const { userUuids } = loadConfig();
  if (!userUuids) return;
  const uuidCount = Object.keys(userUuids).length;
  if (uuidCount === 0) return;
  console.log('[UUID Store] Migrating', uuidCount, 'UUIDs');
  saveUuidStore(userUuids);
}
|
||||
|
||||
// Resolve (or create) the stable player UUID for `username`.
//
// Lookup order — uuid-store.json is the source of truth, config.json is a
// redundant copy kept in sync so either file can recover the other:
//   1. uuid-store.json (case-insensitive key match),
//   2. config.json's userUuids map (recovers the store if it was deleted),
//   3. otherwise generate a fresh v4 UUID and persist it to BOTH files.
//
// Throws when `username` is missing or whitespace-only.
// Side effects: may rewrite uuid-store.json (key-case normalization,
// recovery, or new user) and may call saveConfig to mirror the value.
function getUuidForUser(username) {
  // Lazily require so the dependency is only loaded when actually needed.
  const { v4: uuidv4 } = require('uuid');
  if (!username || !username.trim()) throw new Error('Username required');

  // Keep the user's original casing for storage; match case-insensitively.
  const displayName = username.trim();
  const normalizedLookup = displayName.toLowerCase();

  // First run after an upgrade: seed the store from config.userUuids.
  migrateUuidStoreIfNeeded();

  // 1. Check UUID store (source of truth)
  const uuidStore = loadUuidStore();
  const storeKey = Object.keys(uuidStore).find(k => k.toLowerCase() === normalizedLookup);
  if (storeKey) {
    const existingUuid = uuidStore[storeKey];
    // Normalize the stored key to the casing the user just typed.
    if (storeKey !== displayName) {
      delete uuidStore[storeKey];
      uuidStore[displayName] = existingUuid;
      saveUuidStore(uuidStore);
    }
    // Sync to config (non-critical) — best effort; a failure here must not
    // prevent returning the correct UUID.
    try {
      const config = loadConfig();
      const configUuids = config.userUuids || {};
      const configKey = Object.keys(configUuids).find(k => k.toLowerCase() === normalizedLookup);
      if (!configKey || configUuids[configKey] !== existingUuid) {
        if (configKey) delete configUuids[configKey];
        configUuids[displayName] = existingUuid;
        saveConfig({ userUuids: configUuids });
      }
    } catch (e) {}
    return existingUuid;
  }

  // 2. Fallback: check config.json
  // (handles the case where uuid-store.json was deleted but config survived)
  const config = loadConfig();
  const userUuids = config.userUuids || {};
  const configKey = Object.keys(userUuids).find(k => k.toLowerCase() === normalizedLookup);
  if (configKey) {
    const recoveredUuid = userUuids[configKey];
    // Rebuild the store entry so future lookups hit path 1 again.
    uuidStore[displayName] = recoveredUuid;
    saveUuidStore(uuidStore);
    return recoveredUuid;
  }

  // 3. New user — generate UUID and persist it to both files for redundancy.
  const newUuid = uuidv4();
  uuidStore[displayName] = newUuid;
  saveUuidStore(uuidStore);
  userUuids[displayName] = newUuid;
  saveConfig({ userUuids });
  return newUuid;
}
|
||||
|
||||
// ============================================================================
|
||||
// OLD CODE (before fix) — for comparison testing
|
||||
// ============================================================================
|
||||
|
||||
function getUuidForUser_OLD(username) {
  // Legacy (pre-fix) lookup kept for comparison tests: resolves the UUID from
  // config.json ONLY. Deliberately buggy — if the config gets wiped, a new
  // UUID is minted and the player's character data is lost.
  const { v4: uuidv4 } = require('uuid');
  if (!username || !username.trim()) throw new Error('Username required');

  const displayName = username.trim();
  const normalizedLookup = displayName.toLowerCase();

  // Case-insensitive search of the config's UUID map.
  const userUuids = loadConfig().userUuids || {};
  const matchedKey = Object.keys(userUuids).find(
    (key) => key.toLowerCase() === normalizedLookup
  );
  if (matchedKey) return userUuids[matchedKey];

  // Unknown user: generate and persist a fresh UUID. Note there is no
  // uuid-store backup here — that is the bug the new implementation fixes.
  const freshUuid = uuidv4();
  userUuids[displayName] = freshUuid;
  saveConfig({ userUuids });
  return freshUuid;
}
|
||||
|
||||
function saveConfig_OLD(update) {
  // Legacy (pre-fix) save kept for comparison tests: merges `update` over
  // whatever loadConfig() returns and writes straight to CONFIG_FILE.
  // Deliberately has NO safety check and NO atomic temp/rename — when
  // loadConfig() comes back empty (locked/corrupted file) this silently
  // wipes every other setting. That is the bug under test.
  if (!fs.existsSync(TEST_DIR)) {
    fs.mkdirSync(TEST_DIR, { recursive: true });
  }
  const merged = Object.assign({}, loadConfig(), update);
  fs.writeFileSync(CONFIG_FILE, JSON.stringify(merged, null, 2), 'utf8');
  return true;
}
|
||||
|
||||
// ============================================================================
|
||||
// TESTS
|
||||
// ============================================================================
|
||||
|
||||
// NOTE(review): setup(), cleanup(), assert(), assertEqual(), and the
// passed/failed/failures counters are defined earlier in this file (above
// this excerpt) — presumably setup() resets TEST_DIR between tests; confirm.
console.log('\n' + '='.repeat(70));
console.log('UUID PERSISTENCE TESTS — Simulating update corruption scenarios');
console.log('='.repeat(70));

// --------------------------------------------------------------------------
// TEST 1: Normal flow — UUID stays consistent
// --------------------------------------------------------------------------
console.log('\n--- Test 1: Normal flow — UUID stays consistent ---');
setup();

const uuid1 = getUuidForUser('SpecialK');
const uuid2 = getUuidForUser('SpecialK');
const uuid3 = getUuidForUser('specialk'); // case insensitive

assertEqual(uuid1, uuid2, 'Same username returns same UUID');
assertEqual(uuid1, uuid3, 'Case-insensitive lookup returns same UUID');
// RFC 4122 v4 shape: version nibble "4", variant nibble in [89ab].
assert(uuid1.match(/^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i), 'UUID is valid v4 format');

// --------------------------------------------------------------------------
// TEST 2: Simulate update corruption (THE BUG) — old code
// --------------------------------------------------------------------------
console.log('\n--- Test 2: OLD CODE — Config wipe during update loses UUID ---');
setup();

// Setup: player has UUID
const oldConfig = { username: 'SpecialK', userUuids: { 'SpecialK': 'aaaaaaaa-bbbb-4ccc-9ddd-eeeeeeeeeeee' }, hasLaunchedBefore: true };
fs.writeFileSync(CONFIG_FILE, JSON.stringify(oldConfig, null, 2), 'utf8');

const uuidBefore = getUuidForUser_OLD('SpecialK');
assertEqual(uuidBefore, 'aaaaaaaa-bbbb-4ccc-9ddd-eeeeeeeeeeee', 'UUID correct before corruption');

// Simulate: config.json gets corrupted (loadConfig returns {} because file locked)
// This simulates what happens when saveConfig reads an empty/locked file
fs.writeFileSync(CONFIG_FILE, '', 'utf8'); // Simulate corruption: empty file

// Old saveConfig behavior: reads empty, merges with update, saves
// This wipes userUuids
saveConfig_OLD({ hasLaunchedBefore: true });

const configAfterCorruption = JSON.parse(fs.readFileSync(CONFIG_FILE, 'utf8'));
assert(!configAfterCorruption.userUuids, 'OLD CODE: userUuids wiped after corruption');
assert(!configAfterCorruption.username, 'OLD CODE: username wiped after corruption');

// Player re-enters name, gets NEW UUID (character data lost!)
// These assertions PASS when the legacy code exhibits the bug — they document
// the failure mode, they do not endorse it.
const uuidAfterOld = getUuidForUser_OLD('SpecialK');
assert(uuidAfterOld !== uuidBefore, 'OLD CODE: UUID changed after corruption (BUG!)');

// --------------------------------------------------------------------------
// TEST 3: NEW CODE — Config wipe during update, UUID survives via uuid-store
// --------------------------------------------------------------------------
console.log('\n--- Test 3: NEW CODE — Config wipe + UUID survives via uuid-store ---');
setup();

// Setup: player has UUID (stored in both config.json AND uuid-store.json)
const initialConfig = { username: 'SpecialK', userUuids: { 'SpecialK': 'aaaaaaaa-bbbb-4ccc-9ddd-eeeeeeeeeeee' }, hasLaunchedBefore: true };
fs.writeFileSync(CONFIG_FILE, JSON.stringify(initialConfig, null, 2), 'utf8');

// First call migrates to uuid-store
const uuidFirst = getUuidForUser('SpecialK');
assertEqual(uuidFirst, 'aaaaaaaa-bbbb-4ccc-9ddd-eeeeeeeeeeee', 'UUID correct before corruption');
assert(fs.existsSync(UUID_STORE_FILE), 'uuid-store.json created');

// Simulate: config.json gets wiped (same as the update bug)
fs.writeFileSync(CONFIG_FILE, '{}', 'utf8');

// Verify config is empty
const wipedConfig = loadConfig();
assert(!wipedConfig.userUuids || Object.keys(wipedConfig.userUuids).length === 0, 'Config wiped — no userUuids');
assert(!wipedConfig.username, 'Config wiped — no username');

// Player re-enters same name → UUID recovered from uuid-store!
const uuidAfterNew = getUuidForUser('SpecialK');
assertEqual(uuidAfterNew, 'aaaaaaaa-bbbb-4ccc-9ddd-eeeeeeeeeeee', 'NEW CODE: UUID preserved after config wipe!');

// --------------------------------------------------------------------------
// TEST 4: saveConfig safety check — refuses to overwrite good data with empty
// --------------------------------------------------------------------------
console.log('\n--- Test 4: saveConfig safety check — blocks destructive writes ---');
setup();

// Setup: valid config file with data
const goodConfig = { username: 'SpecialK', userUuids: { 'SpecialK': 'aaaaaaaa-bbbb-4ccc-9ddd-eeeeeeeeeeee' }, hasLaunchedBefore: true, installPath: 'C:\\Games\\Hytale' };
fs.writeFileSync(CONFIG_FILE, JSON.stringify(goodConfig, null, 2), 'utf8');

// Make the file temporarily unreadable by writing garbage (simulates file lock/corruption)
// NOTE(review): originalContent is captured but never used afterwards.
const originalContent = fs.readFileSync(CONFIG_FILE, 'utf8');
fs.writeFileSync(CONFIG_FILE, 'NOT VALID JSON!!!', 'utf8');

// Try to save — should refuse because file exists but can't be parsed
let saveThrew = false;
try {
  saveConfig({ someNewField: true });
} catch (e) {
  saveThrew = true;
}

// The file should still have the garbage (not overwritten with { someNewField: true })
const afterContent = fs.readFileSync(CONFIG_FILE, 'utf8');

// Restore original for backup recovery test
fs.writeFileSync(CONFIG_FILE, JSON.stringify(goodConfig, null, 2), 'utf8');

// Note: with invalid JSON, loadConfig returns {} and safety check triggers
// The save may eventually succeed on retry if the file becomes readable
// What matters is that it doesn't blindly overwrite
assert(afterContent !== '{\n "someNewField": true\n}', 'Safety check prevented blind overwrite of corrupted file');

// --------------------------------------------------------------------------
// TEST 5: Backup recovery — config.json corrupted, recovered from .bak
// --------------------------------------------------------------------------
console.log('\n--- Test 5: Backup recovery — auto-recover from .bak ---');
setup();

// Create config and backup
const validConfig = { username: 'SpecialK', userUuids: { 'SpecialK': 'aaaaaaaa-bbbb-4ccc-9ddd-eeeeeeeeeeee' } };
fs.writeFileSync(CONFIG_BACKUP, JSON.stringify(validConfig, null, 2), 'utf8');
fs.writeFileSync(CONFIG_FILE, 'CORRUPTED', 'utf8');

// loadConfig (defined above this excerpt) falls back to CONFIG_BACKUP when
// the primary file fails to parse.
const recovered = loadConfig();
assertEqual(recovered.username, 'SpecialK', 'Username recovered from backup');
assert(recovered.userUuids && recovered.userUuids['SpecialK'] === 'aaaaaaaa-bbbb-4ccc-9ddd-eeeeeeeeeeee', 'UUID recovered from backup');

// --------------------------------------------------------------------------
// TEST 6: Full update simulation — the exact scenario from player report
// --------------------------------------------------------------------------
console.log('\n--- Test 6: Full update simulation (player report scenario) ---');
setup();

// Step 1: Player installs v2.3.4, sets username, plays game
console.log(' Step 1: Player sets up profile...');
saveConfig({ username: 'Special K', hasLaunchedBefore: true });
const originalUuid = getUuidForUser('Special K');
console.log(`  Original UUID: ${originalUuid}`);

// Step 2: v2.3.5 auto-update — new app launches
console.log(' Step 2: Simulating v2.3.5 update...');

// Simulate the 3 saveConfig calls that happen during startup
// But first, simulate config being temporarily locked (returns empty)
const preUpdateContent = fs.readFileSync(CONFIG_FILE, 'utf8');
fs.writeFileSync(CONFIG_FILE, '', 'utf8'); // Simulate: file empty during write (race condition)

// These are the 3 calls from: profileManager.init, migrateUserDataToCentralized, handleFirstLaunchCheck
// With our safety check, they should NOT wipe the data
try { saveConfig({ hasLaunchedBefore: true }); } catch (e) { /* expected — safety check blocks it */ }

// Simulate file becomes readable again (antivirus releases lock)
fs.writeFileSync(CONFIG_FILE, preUpdateContent, 'utf8');

// Step 3: Player re-enters username (because UI might show empty)
console.log(' Step 3: Player re-enters username...');
const postUpdateUuid = getUuidForUser('Special K');
console.log(`  Post-update UUID: ${postUpdateUuid}`);

assertEqual(postUpdateUuid, originalUuid, 'UUID survived the full update cycle!');

// --------------------------------------------------------------------------
// TEST 7: Multiple users — UUIDs stay independent
// --------------------------------------------------------------------------
console.log('\n--- Test 7: Multiple users — UUIDs stay independent ---');
setup();

const uuidAlice = getUuidForUser('Alice');
const uuidBob = getUuidForUser('Bob');
const uuidCharlie = getUuidForUser('Charlie');

assert(uuidAlice !== uuidBob, 'Alice and Bob have different UUIDs');
assert(uuidBob !== uuidCharlie, 'Bob and Charlie have different UUIDs');

// Wipe config, all should survive
fs.writeFileSync(CONFIG_FILE, '{}', 'utf8');

assertEqual(getUuidForUser('Alice'), uuidAlice, 'Alice UUID survived config wipe');
assertEqual(getUuidForUser('Bob'), uuidBob, 'Bob UUID survived config wipe');
assertEqual(getUuidForUser('Charlie'), uuidCharlie, 'Charlie UUID survived config wipe');

// --------------------------------------------------------------------------
// TEST 8: UUID store deleted — recovery from config.json
// --------------------------------------------------------------------------
console.log('\n--- Test 8: UUID store deleted — recovery from config.json ---');
setup();

// Create UUID via normal flow (saves to both stores)
const uuidOriginal = getUuidForUser('TestPlayer');

// Delete uuid-store.json (simulates user manually deleting it or disk issue)
fs.unlinkSync(UUID_STORE_FILE);
assert(!fs.existsSync(UUID_STORE_FILE), 'uuid-store.json deleted');

// UUID should be recovered from config.json
const uuidRecovered = getUuidForUser('TestPlayer');
assertEqual(uuidRecovered, uuidOriginal, 'UUID recovered from config.json after uuid-store deletion');
assert(fs.existsSync(UUID_STORE_FILE), 'uuid-store.json recreated after recovery');

// --------------------------------------------------------------------------
// TEST 9: Both stores deleted — new UUID generated (fresh install)
// --------------------------------------------------------------------------
console.log('\n--- Test 9: Both stores deleted — new UUID (fresh install) ---');
setup();

const uuidFresh = getUuidForUser('NewPlayer');

// Delete both
fs.unlinkSync(UUID_STORE_FILE);
fs.unlinkSync(CONFIG_FILE);

// With no record anywhere, a new identity is the only correct outcome.
const uuidAfterWipe = getUuidForUser('NewPlayer');
assert(uuidAfterWipe !== uuidFresh, 'New UUID generated when both stores are gone (expected for true fresh install)');

// --------------------------------------------------------------------------
// TEST 10: Worst case — config.json wiped AND uuid-store.json exists
// Simulates the EXACT player-reported scenario with new code
// --------------------------------------------------------------------------
console.log('\n--- Test 10: Exact player scenario with new code ---');
setup();

// Player has been playing for a while
saveConfig({
  username: 'Special K',
  hasLaunchedBefore: true,
  installPath: 'C:\\Games\\Hytale',
  version_client: '2026.02.19-1a311a592',
  version_branch: 'release',
  userUuids: { 'Special K': '11111111-2222-4333-9444-555555555555' }
});

// First call creates uuid-store.json
const originalUuid10 = getUuidForUser('Special K');
assertEqual(originalUuid10, '11111111-2222-4333-9444-555555555555', 'Original UUID loaded');

// BOOM: Update happens, config.json completely wiped
fs.writeFileSync(CONFIG_FILE, '{}', 'utf8');

// Username lost — player has to re-enter
const loadedUsername = loadConfig().username;
assert(!loadedUsername, 'Username is gone from config (simulating what player saw)');

// Player types "Special K" again in settings
saveConfig({ username: 'Special K' });

// Player clicks Play — getUuidForUser called
const recoveredUuid10 = getUuidForUser('Special K');
assertEqual(recoveredUuid10, '11111111-2222-4333-9444-555555555555', 'UUID recovered — character data preserved!');

// ============================================================================
// RESULTS
// ============================================================================
console.log('\n' + '='.repeat(70));
console.log(`RESULTS: ${passed} passed, ${failed} failed`);
if (failed > 0) {
  console.log('\nFailures:');
  failures.forEach(f => console.log(`  ✗ ${f}`));
}
console.log('='.repeat(70));

// Remove the temporary test directory, then report via the exit code so CI
// can fail the build on any assertion failure.
cleanup();
process.exit(failed > 0 ? 1 : 0);
|
||||
Reference in New Issue
Block a user