Compare commits

..

11 Commits

Author SHA1 Message Date
JamesFlare1212
f21a400c82 fix(auth): prevent cookie loss during remote server timeout storms
Server timeouts caused orphaned fetchActivityData calls to fire clearCookieCache()
asynchronously, destroying cookies for all concurrent callers. Three fixes:

1. Replace Promise.race timeout with AbortController to properly cancel
   orphaned fetches and prevent delayed clearCookieCache() calls
2. Add cookie backup/restore — backupCookies() before clearCookieCache(),
   restoreCookieBackup() if re-login fails, so cookies are never lost
3. Add 15s auth failure throttle to block thundering herd re-logins when
   server slowdowns generate many 500 errors simultaneously
2026-04-23 03:06:15 -04:00
JamesFlare1212
73e953f579 fix(auth): distinguish 500 (cookie expired) from other 5xx (real outage)
KEY INSIGHT:
- 500 = cookie expiration (early signal, re-login immediately)
- 502/503/504 = real server outage (bad gateway, service unavailable, gateway timeout)

BEHAVIOR:
- On 500: throw AuthenticationError → immediate re-login
- On 502/503/504: preserve cache, don't re-login (server is down)
- On 401/403: throw AuthenticationError → re-login

This prevents unnecessary re-login attempts during actual server outages
while still handling cookie expiration immediately.
2026-04-11 11:43:35 -04:00
JamesFlare1212
71116f9f6e fix(auth): treat 5xx as cookie expiration and re-login immediately
KEY DISCOVERY: 5xx errors are early signs of cookie expiration.
The backend returns 500 when cookie is expired but session not yet invalidated.
It takes several hours before it returns 401/403.

CHANGES:
1. On 5xx: throw AuthenticationError to trigger immediate re-login
2. Removed cookie validation logic (no longer needed)
3. Cache still preserved during re-login process
4. Re-login happens within same request, not on next request

This fixes the issue where expired cookies would cause 5xx errors
for hours before any re-login attempt was made.
2026-04-11 10:55:31 -04:00
JamesFlare1212
13eccdd3cc fix(validation): use activity 3350 and detect server outage
Cookie validation improvements:
1. Validate with activity ID 3350 (more reliable test endpoint)
2. Distinguish 5xx (outage) from 4xx (invalid cookie) during validation
3. On 5xx during validation: preserve cookie, don't re-login (server outage)
4. On 401/403 during validation: clear cookie and re-login
5. On network error during validation: preserve cookie (treat as server issue)

This prevents unnecessary re-logins during server outages.
2026-04-10 23:41:51 -04:00
JamesFlare1212
c447dc51ee fix(cache): prevent data loss on 5xx and validate cookies
Critical fixes:
1. getActivityDetailsRaw never throws on 5xx - returns null immediately
2. Cache-manager preserves existing data when fetch returns null
3. After 5xx error, validate cookie on next request (backend may invalidate sessions)
4. Cookie validation: fetch activity ID 1 to test, re-login if fails

This prevents local cache corruption during server outages.
2026-04-10 23:41:18 -04:00
JamesFlare1212
5fb60b069f fix(cache): preserve local data on remote 5xx errors
Only update cache when remote returns HTTP 200. On 5xx errors or timeouts,
preserve existing local cache instead of overwriting with empty/error data.
2026-04-08 12:07:51 -04:00
JamesFlare1212
fb68c1ad5d refactor(scan): remove multi-thread scan logic, use sequential processing 2026-04-08 12:04:27 -04:00
JamesFlare1212
78c050a6fa refactor(s3): remove automatic image deletion, users manage S3 files 2026-04-08 10:29:27 -04:00
JamesFlare1212
1e234624fb fix(s3): URL mismatch 2026-04-08 00:00:44 -04:00
JamesFlare1212
6c58eacc8f fix(s3): race condition and different URL in redis 2026-04-07 23:21:54 -04:00
JamesFlare1212
bbbd59be94 fix(s3): update cleanup of all files in s3 2026-04-07 22:45:10 -04:00
6 changed files with 198 additions and 144 deletions

View File

@@ -1,12 +1,15 @@
// engage-api/get-activity.ts // engage-api/get-activity.ts
import axios from 'axios'; import axios, { type AxiosRequestConfig } from 'axios';
import { logger } from '../utils/logger'; import { logger } from '../utils/logger';
import { import {
ensureSingleLogin, ensureSingleLogin,
loadCachedCookies,
saveCookiesToCache, saveCookiesToCache,
clearCookieCache, clearCookieCache,
getCachedCookieString getCachedCookieString,
backupCookies,
restoreCookieBackup,
tryAcquireAuthLock,
releaseAuthCooldown
} from '../services/playwright-auth'; } from '../services/playwright-auth';
// Define interfaces for our data structures // Define interfaces for our data structures
@@ -45,12 +48,14 @@ async function getCompleteCookies(userName: string, userPwd: string): Promise<st
/** /**
* Get activity details from API * Get activity details from API
* Only returns data on HTTP 200. Returns null on any error (5xx, timeout, etc.)
*/ */
async function getActivityDetailsRaw( async function getActivityDetailsRaw(
activityId: string, activityId: string,
cookies: string, cookies: string,
maxRetries: number = 3, maxRetries: number = 3,
timeoutMilliseconds: number = 10000 timeoutMilliseconds: number = 10000,
signal?: AbortSignal
): Promise<string | null> { ): Promise<string | null> {
const url = 'https://engage.nkcswx.cn/Services/ActivitiesService.asmx/GetActivityDetails'; const url = 'https://engage.nkcswx.cn/Services/ActivitiesService.asmx/GetActivityDetails';
const headers = { const headers = {
@@ -64,15 +69,40 @@ async function getActivityDetailsRaw(
}; };
for (let attempt = 0; attempt < maxRetries; attempt++) { for (let attempt = 0; attempt < maxRetries; attempt++) {
if (signal?.aborted) {
logger.debug(`Activity ${activityId} aborted before attempt ${attempt + 1}`);
return null;
}
try { try {
logger.debug(`Attempt ${attempt + 1}/${maxRetries} for activity ${activityId} - Sending POST request to ${url}`); logger.debug(`Attempt ${attempt + 1}/${maxRetries} for activity ${activityId} - Sending POST request to ${url}`);
const response = await axios.post(url, payload, { const response = await axios.post(url, payload, {
headers, headers,
timeout: timeoutMilliseconds, timeout: timeoutMilliseconds,
responseType: 'text', responseType: 'text',
// Add additional timeout safety signal,
maxRedirects: 5 maxRedirects: 5
}); });
// CRITICAL: Only accept HTTP 200. Reject all other status codes including 5xx
if (response.status !== 200) {
logger.error(`Non-200 status ${response.status} for activity ${activityId}. NOT updating cache to preserve local data.`);
// IMPORTANT: Only 500 is cookie expiration. Other 5xx (502/503/504) are real server outages.
// The backend returns 500 when cookie is expired but session not yet invalidated.
// It takes several hours before it returns 401/403.
// 502/503/504 are real server errors (bad gateway, service unavailable, gateway timeout)
if (response.status === 500) {
logger.warn(`Server error 500 - this is cookie expiration. Throwing AuthenticationError to trigger immediate re-login.`);
throw new AuthenticationError(`Received 500 for activity ${activityId} - expired cookie`, 500);
} else if (response.status >= 500 && response.status < 600) {
// Real server outage (502/503/504), preserve cache and don't re-login
logger.error(`Real server outage ${response.status} - preserving local cache, not re-login.`);
}
// Return null immediately on non-200 errors
return null;
}
logger.debug(`Attempt ${attempt + 1}/${maxRetries} for activity ${activityId} - Received response status ${response.status}`); logger.debug(`Attempt ${attempt + 1}/${maxRetries} for activity ${activityId} - Received response status ${response.status}`);
const outerData = JSON.parse(response.data); const outerData = JSON.parse(response.data);
if (outerData && typeof outerData.d === 'string') { if (outerData && typeof outerData.d === 'string') {
@@ -86,9 +116,10 @@ async function getActivityDetailsRaw(
logger.error(`Unexpected API response structure for activity ${activityId}.`); logger.error(`Unexpected API response structure for activity ${activityId}.`);
} }
} catch (error: any) { } catch (error: any) {
// Only treat 401 (Unauthorized) and 403 (Forbidden) as authentication errors // Only treat 401 (Unauthorized) and 403 (Forbidden) as authentication errors
// 404 (Not Found) is valid - activity doesn't exist // 404 (Not Found) is valid - activity doesn't exist
// Other 4xx errors should not trigger re-authentication // Other 4xx/5xx errors should not trigger re-authentication
if (error.response && (error.response.status === 401 || error.response.status === 403)) { if (error.response && (error.response.status === 401 || error.response.status === 403)) {
logger.warn(`Authentication error (${error.response.status}) while fetching activity ${activityId}. Cookie may be invalid.`); logger.warn(`Authentication error (${error.response.status}) while fetching activity ${activityId}. Cookie may be invalid.`);
throw new AuthenticationError(`Received ${error.response.status} for activity ${activityId}`, error.response.status); throw new AuthenticationError(`Received ${error.response.status} for activity ${activityId}`, error.response.status);
@@ -97,10 +128,21 @@ async function getActivityDetailsRaw(
if (error.response) { if (error.response) {
logger.error(`Status: ${error.response.status}, Data (getActivityDetailsRaw): ${ String(error.response.data).slice(0,100)}...`); logger.error(`Status: ${error.response.status}, Data (getActivityDetailsRaw): ${ String(error.response.data).slice(0,100)}...`);
// IMPORTANT: Only 500 is cookie expiration. Other 5xx (502/503/504) are real server outages.
// The backend returns 500 when cookie is expired but session not yet invalidated.
// 502/503/504 are real server errors (bad gateway, service unavailable, gateway timeout)
if (error.response.status === 500) {
logger.warn(`Server error 500 - this is cookie expiration. Throwing AuthenticationError to trigger immediate re-login.`);
throw new AuthenticationError(`Received 500 for activity ${activityId} - expired cookie`, 500);
} else if (error.response.status >= 500 && error.response.status < 600) {
// Real server outage (502/503/504), preserve cache and don't re-login
logger.error(`Real server outage ${error.response.status} - preserving local cache, not re-login.`);
}
} }
if (attempt === maxRetries - 1) { if (attempt === maxRetries - 1) {
logger.error(`All ${maxRetries} retries failed for activity ${activityId}.`); logger.error(`All ${maxRetries} retries failed for activity ${activityId}.`);
throw error; // Don't throw on network/timeout errors, just return null to preserve cache
return null;
} }
await new Promise(resolve => setTimeout(resolve, 1000 * (attempt + 1))); await new Promise(resolve => setTimeout(resolve, 1000 * (attempt + 1)));
} }
@@ -121,6 +163,7 @@ export async function fetchActivityData(
userName: string, userName: string,
userPwd: string, userPwd: string,
forceLogin: boolean = false, forceLogin: boolean = false,
signal?: AbortSignal
): Promise<any | null> { ): Promise<any | null> {
let currentCookie = forceLogin ? null : await getCachedCookieString(); let currentCookie = forceLogin ? null : await getCachedCookieString();
@@ -130,17 +173,10 @@ export async function fetchActivityData(
currentCookie = null; currentCookie = null;
} }
// Optimization: Skip pre-validation, directly request data
// Only validate/re-login when we get 4xx error (fail-fast strategy)
if (!currentCookie) { if (!currentCookie) {
logger.info('No cached cookie found. Attempting login...'); logger.info('No cached cookie found. Attempting login...');
try { try {
currentCookie = await getCompleteCookies(userName, userPwd); currentCookie = await getCompleteCookies(userName, userPwd);
const cookies = await loadCachedCookies();
if (cookies) {
await saveCookiesToCache(cookies);
}
} catch (loginError) { } catch (loginError) {
logger.error(`Login process failed: ${(loginError as Error).message}`); logger.error(`Login process failed: ${(loginError as Error).message}`);
return null; return null;
@@ -152,11 +188,12 @@ export async function fetchActivityData(
return null; return null;
} }
logger.debug('Using cached cookie for API request.'); logger.debug('Using cached cookie for API request.');
try { try {
logger.debug(`Calling getActivityDetailsRaw for activity ${activityId}...`); logger.debug(`Calling getActivityDetailsRaw for activity ${activityId}...`);
const rawActivityDetailsString = await getActivityDetailsRaw(activityId, currentCookie); const rawActivityDetailsString = await getActivityDetailsRaw(activityId, currentCookie, 3, 10000, signal);
logger.debug(`getActivityDetailsRaw returned for activity ${activityId}`); logger.debug(`getActivityDetailsRaw returned for activity ${activityId}`);
if (rawActivityDetailsString) { if (rawActivityDetailsString) {
const parsedOuter = JSON.parse(rawActivityDetailsString); const parsedOuter = JSON.parse(rawActivityDetailsString);
@@ -165,22 +202,28 @@ export async function fetchActivityData(
logger.warn(`No data returned from getActivityDetailsRaw for activity ${activityId}, but no authentication error was thrown.`); logger.warn(`No data returned from getActivityDetailsRaw for activity ${activityId}, but no authentication error was thrown.`);
return null; return null;
} catch (error) { } catch (error) {
if (signal?.aborted) {
logger.debug(`Activity ${activityId} fetch aborted.`);
return null;
}
if (error instanceof AuthenticationError) { if (error instanceof AuthenticationError) {
// Cookie returned 4xx, now validate and re-login // Throttle: prevent thundering herd from multiple 500 errors
logger.warn(`API returned 4xx error (Status: ${error.status}). Cookie may be invalid. Attempting re-login and retry.`); if (!tryAcquireAuthLock()) {
logger.info(`Auth throttled for activity ${activityId}. Reusing current cookies — likely still valid.`);
return null;
}
// Backup cookies before clearing so we can restore on re-login failure
backupCookies();
await clearCookieCache(); await clearCookieCache();
try { try {
logger.info('Attempting re-login due to authentication failure...'); logger.info('Attempting re-login due to authentication failure...');
currentCookie = await getCompleteCookies(userName, userPwd); currentCookie = await getCompleteCookies(userName, userPwd);
releaseAuthCooldown();
const cookies = await loadCachedCookies();
if (cookies) {
await saveCookiesToCache(cookies);
}
logger.info('Re-login successful. Retrying request for activity details...'); logger.info('Re-login successful. Retrying request for activity details...');
const rawActivityDetailsStringRetry = await getActivityDetailsRaw(activityId, currentCookie); const rawActivityDetailsStringRetry = await getActivityDetailsRaw(activityId, currentCookie, 1, 10000, signal);
if (rawActivityDetailsStringRetry) { if (rawActivityDetailsStringRetry) {
const parsedOuterRetry = JSON.parse(rawActivityDetailsStringRetry); const parsedOuterRetry = JSON.parse(rawActivityDetailsStringRetry);
return JSON.parse(parsedOuterRetry.d); return JSON.parse(parsedOuterRetry.d);
@@ -188,7 +231,9 @@ export async function fetchActivityData(
logger.warn(`Still no details for activity ${activityId} after re-login and retry.`); logger.warn(`Still no details for activity ${activityId} after re-login and retry.`);
return null; return null;
} catch (retryLoginOrFetchError) { } catch (retryLoginOrFetchError) {
logger.error(`Error during re-login or retry fetch for activity ${activityId}: ${(retryLoginOrFetchError as Error).message}`); logger.error(`Re-login or retry failed for activity ${activityId}: ${(retryLoginOrFetchError as Error).message}`);
// Restore old cookies instead of leaving cache empty
await restoreCookieBackup();
return null; return null;
} }
} else { } else {

View File

@@ -21,7 +21,6 @@ MAX_ACTIVITY_ID_SCAN=8000
# Maximum concurrent API calls during crawling (default: 8) # Maximum concurrent API calls during crawling (default: 8)
# Higher values = faster crawling but more server load # Higher values = faster crawling but more server load
# Set to 1 for sequential processing (slow but safe)
CONCURRENT_API_CALLS=8 CONCURRENT_API_CALLS=8
# Request timeout in milliseconds (default: 25000 = 25 seconds) # Request timeout in milliseconds (default: 25000 = 25 seconds)

View File

@@ -38,8 +38,7 @@ import { extractBase64Image } from './utils/image-processor';
import { import {
initializeClubCache, initializeClubCache,
updateStaleClubs, updateStaleClubs,
initializeOrUpdateStaffCache, initializeOrUpdateStaffCache
cleanupOrphanedS3Images
} from './services/cache-manager'; } from './services/cache-manager';
import { logger } from './utils/logger'; import { logger } from './utils/logger';
import type { ActivityData } from './models/activity' import type { ActivityData } from './models/activity'
@@ -421,7 +420,10 @@ async function performBackgroundTasks(): Promise<void> {
try { try {
await initializeClubCache(); await initializeClubCache();
await initializeOrUpdateStaffCache(true); await initializeOrUpdateStaffCache(true);
await cleanupOrphanedS3Images(); // NOTE: Removed immediate cleanupOrphanedS3Images() call.
// Cleanup will run during periodic updateStaleClubs() instead.
// Running cleanup immediately after initialization caused race condition
// where newly uploaded images were deleted before they could be referenced.
logger.info(`Setting up periodic club cache updates every ${CLUB_CHECK_INTERVAL_SECONDS} seconds.`); logger.info(`Setting up periodic club cache updates every ${CLUB_CHECK_INTERVAL_SECONDS} seconds.`);
setInterval(() => { setInterval(() => {

View File

@@ -11,10 +11,10 @@ import {
getAllActivityKeys, getAllActivityKeys,
ACTIVITY_KEY_PREFIX ACTIVITY_KEY_PREFIX
} from './redis-service'; } from './redis-service';
import { uploadImageFromBase64, listS3Objects, deleteS3Objects, constructS3Url } from './s3-service'; import { uploadImageFromBase64, listS3Objects, constructS3Url } from './s3-service';
import { extractBase64Image } from '../utils/image-processor'; import { extractBase64Image } from '../utils/image-processor';
import { logger } from '../utils/logger'; import { logger } from '../utils/logger';
import { BatchProcessor, executeWithConcurrencyAndProgress } from '../utils/semaphore'; import { BatchProcessor } from '../utils/semaphore';
import type { ActivityData } from '../models/activity'; import type { ActivityData } from '../models/activity';
@@ -42,31 +42,61 @@ let skippedCount = 0;
/** /**
* Process and cache a single activity * Process and cache a single activity
* @param activityId - The activity ID to process * @param activityId - The activity ID to process
* @param forceUpdate - If true, update cache even on fetch failure (default: false)
* @returns The processed activity data * @returns The processed activity data
*/ */
async function processAndCacheActivity(activityId: string): Promise<ActivityData> { async function processAndCacheActivity(activityId: string, forceUpdate: boolean = false): Promise<ActivityData> {
logger.debug(`Processing activity ID: ${activityId}`); logger.debug(`Processing activity ID: ${activityId}`);
try { try {
if (!USERNAME || !PASSWORD) { if (!USERNAME || !PASSWORD) {
throw new Error('API username or password not configured'); throw new Error('API username or password not configured');
} }
// Add timeout protection for the entire fetch operation // Add timeout protection via AbortController - properly cancels orphaned fetches
logger.debug(`Fetching activity data for ID: ${activityId}`); logger.debug(`Fetching activity data for ID: ${activityId}`);
const activityJson = await Promise.race([ const controller = new AbortController();
fetchActivityData(activityId, USERNAME, PASSWORD, false), const timeoutId = setTimeout(
new Promise((_, reject) => () => controller.abort(),
setTimeout(() => reject(new Error(`Timeout fetching activity ${activityId} after ${CRAWLER_REQUEST_TIMEOUT_MS}ms`)), CRAWLER_REQUEST_TIMEOUT_MS + 5000) CRAWLER_REQUEST_TIMEOUT_MS + 5000
) );
]);
let activityJson: any = null;
try {
activityJson = await fetchActivityData(
activityId,
USERNAME,
PASSWORD,
false,
controller.signal
);
} finally {
clearTimeout(timeoutId);
}
if (controller.signal.aborted) {
logger.warn(`Request for activity ${activityId} timed out after ${CRAWLER_REQUEST_TIMEOUT_MS + 5000}ms. Cancelling orphaned fetch.`);
// Preserve existing cache on timeout
const existingData = await getActivityData(activityId);
return existingData || { lastCheck: new Date().toISOString(), error: `Timeout after ${CRAWLER_REQUEST_TIMEOUT_MS + 5000}ms` };
}
let structuredActivity: ActivityData; let structuredActivity: ActivityData;
if (!activityJson) { if (!activityJson) {
logger.info(`No data found for activity ID ${activityId} from engage API. Caching as empty.`); // CRITICAL: Only cache empty data if forceUpdate is true
// This prevents 5xx errors from overwriting valid local data
if (forceUpdate) {
logger.info(`No data found for activity ID ${activityId} from engage API. Force updating cache.`);
structuredActivity = { structuredActivity = {
lastCheck: new Date().toISOString(), lastCheck: new Date().toISOString(),
source: 'api-fetch-empty' source: 'api-fetch-empty'
}; };
await setActivityData(activityId, structuredActivity);
return structuredActivity;
} else {
logger.warn(`No data for activity ${activityId}. Preserving existing cache - NOT updating.`);
const existingData = await getActivityData(activityId);
return existingData || { lastCheck: new Date().toISOString(), source: 'cache-preserved' };
}
} else { } else {
structuredActivity = await structActivityData(activityJson); structuredActivity = await structActivityData(activityJson);
if (structuredActivity && structuredActivity.photo && if (structuredActivity && structuredActivity.photo &&
@@ -94,12 +124,19 @@ async function processAndCacheActivity(activityId: string): Promise<ActivityData
return structuredActivity; return structuredActivity;
} catch (error) { } catch (error) {
logger.error(`Error processing activity ID ${activityId}:`, error); logger.error(`Error processing activity ID ${activityId}:`, error);
// CRITICAL: On error, preserve existing cache instead of overwriting with error data
if (forceUpdate) {
const errorData: ActivityData = { const errorData: ActivityData = {
lastCheck: new Date().toISOString(), lastCheck: new Date().toISOString(),
error: "Failed to fetch or process" error: "Failed to fetch or process"
}; };
await setActivityData(activityId, errorData); await setActivityData(activityId, errorData);
return errorData; return errorData;
} else {
logger.warn(`Error fetching activity ${activityId}. Preserving existing cache.`);
const existingData = await getActivityData(activityId);
return existingData || { lastCheck: new Date().toISOString(), error: (error as Error).message };
}
} }
} }
@@ -200,7 +237,6 @@ export async function updateStaleClubs(): Promise<void> {
if (staleActivityIds.length === 0) { if (staleActivityIds.length === 0) {
logger.info('No stale activities found. Skipping update.'); logger.info('No stale activities found. Skipping update.');
await cleanupOrphanedS3Images();
logger.info('Stale club check finished.'); logger.info('Stale club check finished.');
return; return;
} }
@@ -230,8 +266,6 @@ export async function updateStaleClubs(): Promise<void> {
// Process stale activities concurrently // Process stale activities concurrently
await processor.process(staleActivityIds); await processor.process(staleActivityIds);
await cleanupOrphanedS3Images();
logger.info('Stale club check finished.'); logger.info('Stale club check finished.');
} }
@@ -280,59 +314,3 @@ export async function initializeOrUpdateStaffCache(forceUpdate: boolean = false)
logger.error('Error initializing or updating staff cache:', error); logger.error('Error initializing or updating staff cache:', error);
} }
} }
/**
* Clean up orphaned S3 images
*/
export async function cleanupOrphanedS3Images(): Promise<void> {
logger.info('Starting S3 orphan image cleanup...');
const s3ObjectListPrefix = S3_IMAGE_PREFIX ? `${S3_IMAGE_PREFIX}/` : '';
try {
const referencedS3Urls = new Set<string>();
const allActivityRedisKeys = await getAllActivityKeys();
const S3_ENDPOINT = process.env.S3_ENDPOINT;
for (const redisKey of allActivityRedisKeys) {
const activityId = redisKey.substring(ACTIVITY_KEY_PREFIX.length);
const activityData = await getActivityData(activityId);
if (activityData &&
typeof activityData.photo === 'string' &&
activityData.photo.startsWith('http') &&
S3_ENDPOINT &&
activityData.photo.startsWith(S3_ENDPOINT)) {
referencedS3Urls.add(activityData.photo);
}
}
logger.info(`Found ${referencedS3Urls.size} unique S3 URLs referenced in Redis.`);
const s3ObjectKeys = await listS3Objects(s3ObjectListPrefix);
if (!s3ObjectKeys || s3ObjectKeys.length === 0) {
logger.info(`No images found in S3 under prefix "${s3ObjectListPrefix}". Nothing to clean up.`);
return;
}
logger.debug(`Found ${s3ObjectKeys.length} objects in S3 under prefix "${s3ObjectListPrefix}".`);
const orphanedObjectKeys: string[] = [];
for (const objectKey of s3ObjectKeys) {
const s3Url = constructS3Url(objectKey);
if (s3Url && !referencedS3Urls.has(s3Url)) {
orphanedObjectKeys.push(objectKey);
}
}
if (orphanedObjectKeys.length > 0) {
logger.info(`Found ${orphanedObjectKeys.length} orphaned S3 objects to delete. Submitting deletion...`);
await deleteS3Objects(orphanedObjectKeys);
} else {
logger.info('No orphaned S3 images found after comparison.');
}
logger.info('S3 orphan image cleanup finished.');
} catch (error) {
logger.error('Error during S3 orphan image cleanup:', error);
}
}

View File

@@ -11,6 +11,39 @@ let _inMemoryCookies: Cookie[] | null = null;
// Login lock to prevent concurrent login attempts // Login lock to prevent concurrent login attempts
let _loginLock: Promise<Cookie[]> | null = null; let _loginLock: Promise<Cookie[]> | null = null;
// Cookie backup: preserved before clearCookieCache, restored on re-login failure
let _cookieBackup: Cookie[] | null = null;
// Auth failure throttle: debounce consecutive re-login triggers from 500 errors
// Prevents thundering herd when server is slow and returns many 500s
let _authFailureCooldownUntil = 0;
const AUTH_FAILURE_COOLDOWN_MS = 15000; // 15s cooldown between re-login cycles
/**
* Put all callers to wait during auth cooldown window.
* Returns true if auth is allowed (outside cooldown), false if throttled.
*/
export function tryAcquireAuthLock(): boolean {
const now = Date.now();
if (now < _authFailureCooldownUntil) {
const remaining = _authFailureCooldownUntil - now;
logger.warn(
`Re-login throttled: ${Math.round(remaining / 1000)}s cooldown remaining. ` +
`Existing cookies are likely still valid — server 500 is a temporary slowdown.`
);
return false;
}
return true;
}
/**
* Called after a successful re-login to release the cooldown.
*/
export function releaseAuthCooldown(): void {
_authFailureCooldownUntil = Date.now() + AUTH_FAILURE_COOLDOWN_MS;
logger.info(`Auth cooldown set: ${AUTH_FAILURE_COOLDOWN_MS}ms to prevent thundering herd re-logins.`);
}
/** /**
* Ensure only one login process runs at a time * Ensure only one login process runs at a time
*/ */
@@ -178,8 +211,40 @@ export async function saveCookiesToCache(cookies: Cookie[]): Promise<void> {
} }
} }
/**
* Backup current cookies before clearing. Restored if re-login fails.
*/
export function backupCookies(): Cookie[] | null {
if (_inMemoryCookies) {
_cookieBackup = [..._inMemoryCookies];
logger.info('Cookies backed up before clear.');
}
return _cookieBackup;
}
/**
* Restore cookies from backup after failed re-login.
*/
export async function restoreCookieBackup(): Promise<boolean> {
if (_cookieBackup) {
_inMemoryCookies = _cookieBackup;
try {
await fs.promises.writeFile(COOKIE_FILE_PATH, JSON.stringify(_cookieBackup, null, 2), 'utf-8');
logger.info('Cookies restored from backup successfully.');
_cookieBackup = null;
return true;
} catch (error: any) {
logger.error('Failed to restore cookies from backup:', error.message);
return false;
}
}
logger.warn('No cookie backup available for restore.');
return false;
}
/** /**
* Clear cookie cache * Clear cookie cache
* Prefer backupAndClearCookieCache() instead to preserve old cookies.
*/ */
export async function clearCookieCache(): Promise<void> { export async function clearCookieCache(): Promise<void> {
_inMemoryCookies = null; _inMemoryCookies = null;

View File

@@ -149,41 +149,6 @@ export async function listS3Objects(prefix: string): Promise<string[]> {
} }
} }
/**
* Deletes multiple objects from S3.
* @param objectKeysArray - Array of object keys to delete
* @returns True if successful or partially successful, false on major error
*/
export async function deleteS3Objects(objectKeysArray: string[]): Promise<boolean> {
if (!s3Client) {
logger.warn('S3 client not configured. Cannot delete objects.');
return false;
}
if (!objectKeysArray || objectKeysArray.length === 0) {
logger.info('No objects to delete from S3.');
return true;
}
try {
let successCount = 0;
let errorCount = 0;
for (const key of objectKeysArray) {
try {
await s3Client.delete(key);
successCount++;
} catch (error) {
errorCount++;
logger.error(`Failed to delete object ${key}:`, error);
}
}
logger.info(`Deleted ${successCount} objects from S3. Failed: ${errorCount}`);
return errorCount === 0; // True if all succeeded
} catch (error) {
logger.error('S3 DeleteObjects Error:', error);
return false;
}
}
/** /**
* Constructs the public S3 URL for an object key. * Constructs the public S3 URL for an object key.
* Uses S3_PUBLIC_URL if set (reverse proxy scenario), otherwise uses S3_ENDPOINT. * Uses S3_PUBLIC_URL if set (reverse proxy scenario), otherwise uses S3_ENDPOINT.