init: port to typescript and bun

This commit is contained in:
JamesFlare1212
2025-05-10 23:39:39 -04:00
commit 2543e56ec4
19 changed files with 2099 additions and 0 deletions

253
services/cache-manager.ts Normal file
View File

@@ -0,0 +1,253 @@
// services/cache-manager.ts
import { config } from 'dotenv';
import pLimit from 'p-limit';
import { fetchActivityData } from '../engage-api/get-activity';
import { structActivityData } from '../engage-api/struct-activity';
import { structStaffData } from '../engage-api/struct-staff';
import {
getActivityData,
setActivityData,
getStaffData,
setStaffData,
getAllActivityKeys,
ACTIVITY_KEY_PREFIX
} from './redis-service';
import { uploadImageFromBase64, listS3Objects, deleteS3Objects, constructS3Url } from './s3-service';
import { extractBase64Image } from '../utils/image-processor';
import { logger } from '../utils/logger';
import type { ActivityData } from '../models/activity';
// Load variables from .env before any process.env reads below.
config();
// Environment configuration
// Credentials for the upstream engage API (required for fetches).
const USERNAME = process.env.API_USERNAME;
const PASSWORD = process.env.API_PASSWORD;
// Inclusive activity-ID range scanned during initial cache population.
const MIN_ACTIVITY_ID_SCAN = parseInt(process.env.MIN_ACTIVITY_ID_SCAN || '0', 10);
const MAX_ACTIVITY_ID_SCAN = parseInt(process.env.MAX_ACTIVITY_ID_SCAN || '9999', 10);
// Cap on simultaneous upstream API requests (enforced via p-limit below).
const CONCURRENT_API_CALLS = parseInt(process.env.CONCURRENT_API_CALLS || '10', 10);
// Staleness thresholds, in minutes, for club and staff cache refreshes.
const CLUB_UPDATE_INTERVAL_MINS = parseInt(process.env.CLUB_UPDATE_INTERVAL_MINS || '60', 10);
const STAFF_UPDATE_INTERVAL_MINS = parseInt(process.env.STAFF_UPDATE_INTERVAL_MINS || '60', 10);
// Activity ID whose payload is used to build the staff cache.
const FIXED_STAFF_ACTIVITY_ID = process.env.FIXED_STAFF_ACTIVITY_ID;
// Key prefix for images in S3; trailing slash stripped so path joins stay clean.
const S3_IMAGE_PREFIX = (process.env.S3_PUBLIC_URL_PREFIX || 'files').replace(/\/$/, '');
// Limit concurrent API calls
const limit = pLimit(CONCURRENT_API_CALLS);
/**
 * Fetch, structure, and cache a single activity. If the structured data
 * carries an inline base64 photo, it is uploaded to S3 and replaced with
 * the public URL. On failure an error marker is cached instead so the
 * scan loops do not hammer a broken ID.
 * @param activityId - The activity ID to process
 * @returns The processed activity data (or the cached error marker)
 */
async function processAndCacheActivity(activityId: string): Promise<ActivityData> {
  logger.debug(`Processing activity ID: ${activityId}`);
  try {
    if (!USERNAME || !PASSWORD) {
      throw new Error('API username or password not configured');
    }
    const activityJson = await fetchActivityData(activityId, USERNAME, PASSWORD);
    let structuredActivity: ActivityData;
    if (activityJson) {
      structuredActivity = await structActivityData(activityJson);
      const photo = structuredActivity?.photo;
      // Only data-URI photos need migrating; http(s) URLs are left alone.
      if (typeof photo === 'string' && photo.startsWith('data:image')) {
        const imageInfo = extractBase64Image(photo);
        if (imageInfo) {
          const s3Url = await uploadImageFromBase64(
            imageInfo.base64Content,
            imageInfo.format,
            activityId
          );
          if (s3Url) {
            structuredActivity.photo = s3Url;
          } else {
            logger.warn(`Failed S3 upload for activity ${activityId}. Photo may be base64 or null.`);
          }
        }
      }
    } else {
      logger.info(`No data found for activity ID ${activityId} from engage API. Caching as empty.`);
      structuredActivity = {
        lastCheck: new Date().toISOString(),
        source: 'api-fetch-empty'
      };
    }
    // Stamp the check time regardless of outcome so staleness logic works.
    structuredActivity.lastCheck = new Date().toISOString();
    await setActivityData(activityId, structuredActivity);
    return structuredActivity;
  } catch (error) {
    logger.error(`Error processing activity ID ${activityId}:`, error);
    const errorData: ActivityData = {
      lastCheck: new Date().toISOString(),
      error: "Failed to fetch or process"
    };
    await setActivityData(activityId, errorData);
    return errorData;
  }
}
/**
 * Populate the club cache by walking the configured activity-ID range.
 * Only IDs that are missing, empty, never checked, or errored are
 * (re)fetched; concurrency is bounded by the shared p-limit instance.
 */
export async function initializeClubCache(): Promise<void> {
  logger.info(`Starting initial club cache population from ID ${MIN_ACTIVITY_ID_SCAN} to ${MAX_ACTIVITY_ID_SCAN}`);
  const tasks: Promise<void>[] = [];
  for (let id = MIN_ACTIVITY_ID_SCAN; id <= MAX_ACTIVITY_ID_SCAN; id++) {
    const activityId = `${id}`;
    const task = limit(async () => {
      const cached = await getActivityData(activityId);
      const needsInit =
        !cached ||
        Object.keys(cached).length === 0 ||
        !cached.lastCheck ||
        cached.error;
      if (needsInit) {
        logger.debug(`Initializing cache for activity ID: ${activityId}`);
        await processAndCacheActivity(activityId);
      }
    });
    tasks.push(task);
  }
  await Promise.all(tasks);
  logger.info('Initial club cache population finished.');
}
/**
 * Refresh clubs whose cached data is stale or errored, then remove S3
 * images that no cached activity references any longer.
 *
 * Fix: the orphaned-image cleanup was previously awaited BEFORE
 * `Promise.all(promises)`, so it ran concurrently with the in-flight
 * refreshes. A refresh that uploaded a new image mid-cleanup could have
 * that image misclassified as an orphan and deleted. Cleanup now runs
 * only after every refresh has settled.
 */
export async function updateStaleClubs(): Promise<void> {
  logger.info('Starting stale club check...');
  const now = Date.now();
  const updateIntervalMs = CLUB_UPDATE_INTERVAL_MINS * 60 * 1000;
  const promises: Promise<void>[] = [];
  const activityKeys = await getAllActivityKeys();
  for (const key of activityKeys) {
    const activityId = key.substring(ACTIVITY_KEY_PREFIX.length);
    promises.push(limit(async () => {
      const cachedData = await getActivityData(activityId);
      if (cachedData && cachedData.lastCheck) {
        const lastCheckTime = new Date(cachedData.lastCheck).getTime();
        // Refresh when past the staleness window or the last attempt failed.
        if ((now - lastCheckTime) > updateIntervalMs || cachedData.error) {
          logger.info(`Activity ${activityId} is stale or had error. Updating...`);
          await processAndCacheActivity(activityId);
        }
      } else if (!cachedData || Object.keys(cachedData).length === 0) {
        logger.info(`Activity ${activityId} not in cache or is empty object. Attempting to fetch...`);
        await processAndCacheActivity(activityId);
      }
    }));
  }
  // Wait for all refreshes first so newly uploaded images are visible in
  // Redis before the orphan scan decides what to delete.
  await Promise.all(promises);
  await cleanupOrphanedS3Images();
  logger.info('Stale club check finished.');
}
/**
 * Refresh the cached staff roster when it is missing, stale, or a
 * refresh is explicitly requested. Requires API credentials and
 * FIXED_STAFF_ACTIVITY_ID to be configured for the fetch to run.
 * @param forceUpdate - Force an update regardless of staleness
 */
export async function initializeOrUpdateStaffCache(forceUpdate: boolean = false): Promise<void> {
  logger.info('Starting staff cache check/update...');
  try {
    const cachedStaffData = await getStaffData();
    const intervalMs = STAFF_UPDATE_INTERVAL_MINS * 60 * 1000;
    let needsUpdate = forceUpdate;
    if (!cachedStaffData || !cachedStaffData.lastCheck) {
      needsUpdate = true;
    } else if (Date.now() - new Date(cachedStaffData.lastCheck).getTime() > intervalMs) {
      needsUpdate = true;
    }
    if (!(needsUpdate && USERNAME && PASSWORD && FIXED_STAFF_ACTIVITY_ID)) {
      logger.info('Staff data is up-to-date.');
      return;
    }
    logger.info('Staff data needs update. Fetching...');
    const activityJson = await fetchActivityData(FIXED_STAFF_ACTIVITY_ID, USERNAME, PASSWORD);
    if (activityJson) {
      // structStaffData yields a Map; flatten to a plain object for Redis.
      const staffObject = Object.fromEntries(await structStaffData(activityJson));
      staffObject.lastCheck = new Date().toISOString();
      await setStaffData(staffObject);
      logger.info('Staff data updated and cached.');
    } else {
      logger.warn(`Could not retrieve base data for staff (activity ID ${FIXED_STAFF_ACTIVITY_ID}).`);
      if (cachedStaffData && cachedStaffData.lastCheck) {
        // Keep serving the old roster; bump lastCheck to back off retries.
        cachedStaffData.lastCheck = new Date().toISOString();
        await setStaffData(cachedStaffData);
      }
    }
  } catch (error) {
    logger.error('Error initializing or updating staff cache:', error);
  }
}
/**
 * Delete S3 images that no cached activity references. Builds the set of
 * S3-hosted photo URLs stored in Redis, lists every object under the
 * image prefix, and removes those absent from the referenced set.
 */
export async function cleanupOrphanedS3Images(): Promise<void> {
  logger.info('Starting S3 orphan image cleanup...');
  const s3ObjectListPrefix = S3_IMAGE_PREFIX ? `${S3_IMAGE_PREFIX}/` : '';
  try {
    const referencedS3Urls = new Set<string>();
    const allActivityRedisKeys = await getAllActivityKeys();
    const S3_ENDPOINT = process.env.S3_ENDPOINT;
    for (const redisKey of allActivityRedisKeys) {
      const activityId = redisKey.substring(ACTIVITY_KEY_PREFIX.length);
      const activityData = await getActivityData(activityId);
      const photo = activityData?.photo;
      // Only count photos that are real URLs hosted on our S3 endpoint.
      const isS3Photo =
        typeof photo === 'string' &&
        photo.startsWith('http') &&
        !!S3_ENDPOINT &&
        photo.startsWith(S3_ENDPOINT);
      if (isS3Photo) {
        referencedS3Urls.add(photo);
      }
    }
    logger.info(`Found ${referencedS3Urls.size} unique S3 URLs referenced in Redis.`);
    const s3ObjectKeys = await listS3Objects(s3ObjectListPrefix);
    if (!s3ObjectKeys || s3ObjectKeys.length === 0) {
      logger.info(`No images found in S3 under prefix "${s3ObjectListPrefix}". Nothing to clean up.`);
      return;
    }
    logger.debug(`Found ${s3ObjectKeys.length} objects in S3 under prefix "${s3ObjectListPrefix}".`);
    // An object is orphaned when its public URL is not referenced anywhere.
    const orphanedObjectKeys = s3ObjectKeys.filter(objectKey => {
      const s3Url = constructS3Url(objectKey);
      return !!s3Url && !referencedS3Urls.has(s3Url);
    });
    if (orphanedObjectKeys.length > 0) {
      logger.info(`Found ${orphanedObjectKeys.length} orphaned S3 objects to delete. Submitting deletion...`);
      await deleteS3Objects(orphanedObjectKeys);
    } else {
      logger.info('No orphaned S3 images found after comparison.');
    }
    logger.info('S3 orphan image cleanup finished.');
  } catch (error) {
    logger.error('Error during S3 orphan image cleanup:', error);
  }
}

149
services/redis-service.ts Normal file
View File

@@ -0,0 +1,149 @@
// services/redis-service.ts
import { RedisClient } from "bun";
import { config } from 'dotenv';
import { logger } from '../utils/logger';
// Load .env so REDIS_URL is available before the client is constructed.
config();
export const ACTIVITY_KEY_PREFIX = 'activity:'; // Exported for use in cache-manager
// Single key under which the whole staff map is stored as one JSON blob.
const STAFF_KEY = 'staffs:all';
// Always create a new client instance with .env config
const redisUrl = process.env.REDIS_URL || 'redis://localhost:6379';
// Null when construction fails; every helper below guards against this.
let redisClient: RedisClient | null = null;
try {
  redisClient = new RedisClient(redisUrl);
  logger.info('Redis client initialized. Connection will be established on first command.');
} catch (error) {
  logger.error('Failed to initialize Redis client:', error);
}
/**
 * Reads one activity's cached JSON blob from Redis and parses it.
 * @param activityId - The activity ID to fetch
 * @returns Parsed JSON object or null if not found/error
 */
export async function getActivityData(activityId: string): Promise<any | null> {
  if (!redisClient) {
    logger.warn('Redis client not available, skipping getActivityData');
    return null;
  }
  try {
    const raw = await redisClient.get(`${ACTIVITY_KEY_PREFIX}${activityId}`);
    if (!raw) {
      return null;
    }
    return JSON.parse(raw);
  } catch (err) {
    logger.error(`Error getting activity ${activityId} from Redis:`, err);
    return null;
  }
}
/**
 * Serializes and stores one activity's data under its prefixed key.
 * Failures are logged and swallowed — callers treat caching as best-effort.
 * @param activityId - The activity ID to set
 * @param data - The activity data object
 */
export async function setActivityData(activityId: string, data: any): Promise<void> {
  if (!redisClient) {
    logger.warn('Redis client not available, skipping setActivityData');
    return;
  }
  const redisKey = `${ACTIVITY_KEY_PREFIX}${activityId}`;
  try {
    await redisClient.set(redisKey, JSON.stringify(data));
  } catch (err) {
    logger.error(`Error setting activity ${activityId} in Redis:`, err);
  }
}
/**
 * Reads the cached staff roster from Redis and parses it.
 * @returns Parsed JSON object or null if not found/error
 */
export async function getStaffData(): Promise<any | null> {
  if (!redisClient) {
    logger.warn('Redis client not available, skipping getStaffData');
    return null;
  }
  try {
    const raw = await redisClient.get(STAFF_KEY);
    if (!raw) {
      return null;
    }
    return JSON.parse(raw);
  } catch (err) {
    logger.error('Error getting staff data from Redis:', err);
    return null;
  }
}
/**
 * Serializes and stores the full staff roster under STAFF_KEY.
 * Failures are logged and swallowed — callers treat caching as best-effort.
 * @param data - The staff data object
 */
export async function setStaffData(data: any): Promise<void> {
  if (!redisClient) {
    logger.warn('Redis client not available, skipping setStaffData');
    return;
  }
  try {
    const serialized = JSON.stringify(data);
    await redisClient.set(STAFF_KEY, serialized);
  } catch (err) {
    logger.error('Error setting staff data in Redis:', err);
  }
}
/**
 * Collects every activity key in Redis using cursor-based SCAN.
 * This can be resource-intensive on large datasets. Use with caution.
 * @returns An array of keys (empty on error or when Redis is unavailable)
 */
export async function getAllActivityKeys(): Promise<string[]> {
  if (!redisClient) {
    logger.warn('Redis client not available, skipping getAllActivityKeys');
    return [];
  }
  const keys: string[] = [];
  try {
    // Bun's RedisClient has no scan() helper, so issue raw SCAN commands
    // through send() and follow the cursor until it wraps back to '0'.
    let cursor = '0';
    while (true) {
      const result = await redisClient.send('SCAN', [
        cursor,
        'MATCH',
        `${ACTIVITY_KEY_PREFIX}*`,
        'COUNT',
        '100'
      ]);
      cursor = result[0];
      keys.push(...(result[1] || []));
      if (cursor === '0') {
        break; // a zero cursor marks the end of the full iteration
      }
    }
    logger.info(`Found ${keys.length} activity keys in Redis using SCAN.`);
    return keys;
  } catch (err) {
    logger.error('Error getting all activity keys from Redis using SCAN:', err);
    return []; // Return empty array on error
  }
}
/**
 * Accessor for the module's shared Redis client.
 * @returns The Redis client, or null if initialization failed
 */
export function getRedisClient(): RedisClient | null {
  return redisClient;
}
/**
 * Closes the Redis connection if a client was successfully created;
 * a no-op otherwise.
 */
export async function closeRedisConnection(): Promise<void> {
  if (!redisClient) {
    return;
  }
  redisClient.close();
  logger.info('Redis connection closed.');
}

203
services/s3-service.ts Normal file
View File

@@ -0,0 +1,203 @@
// services/s3-service.ts
import { S3Client } from "bun";
import { v4 as uuidv4 } from 'uuid';
import { config } from 'dotenv';
import { logger } from '../utils/logger';
import { decodeBase64Image } from '../utils/image-processor';
// Load environment variables from .env before reading process.env below.
config();
// S3 configuration
const S3_ENDPOINT = process.env.S3_ENDPOINT;
const S3_REGION = process.env.S3_REGION;
const S3_ACCESS_KEY_ID = process.env.S3_ACCESS_KEY_ID;
const S3_SECRET_ACCESS_KEY = process.env.S3_SECRET_ACCESS_KEY;
const BUCKET_NAME = process.env.S3_BUCKET_NAME;
// Key prefix for uploaded files; trailing slash stripped so joins stay clean.
const PUBLIC_URL_FILE_PREFIX = (process.env.S3_PUBLIC_URL_PREFIX || 'files').replace(/\/$/, '');
// Initialize S3 client
// Stays null when credentials are incomplete or construction throws;
// every exported function below guards against a null client.
let s3Client: S3Client | null = null;
if (S3_ACCESS_KEY_ID && S3_SECRET_ACCESS_KEY && BUCKET_NAME) {
  try {
    s3Client = new S3Client({
      accessKeyId: S3_ACCESS_KEY_ID,
      secretAccessKey: S3_SECRET_ACCESS_KEY,
      bucket: BUCKET_NAME,
      endpoint: S3_ENDPOINT,
      region: S3_REGION
    });
    logger.info('S3 client initialized successfully.');
  } catch (error) {
    logger.error('Failed to initialize S3 client:', error);
  }
} else {
  logger.warn('S3 client configuration is incomplete. S3 operations will be disabled.');
}
/**
 * Uploads a decoded base64 image to S3 under a unique, activity-scoped key
 * and returns its public URL.
 * @param base64Data - The base64 content (without the data URI prefix)
 * @param originalFormat - The image format (e.g., 'png', 'jpeg')
 * @param activityId - The activity ID, used for naming
 * @returns The public URL of the uploaded image or null on error
 */
export async function uploadImageFromBase64(
  base64Data: string,
  originalFormat: string,
  activityId: string
): Promise<string | null> {
  if (!s3Client) {
    logger.warn('S3 client not configured. Cannot upload image.');
    return null;
  }
  const hasAllInputs = Boolean(base64Data && originalFormat && activityId);
  if (!hasAllInputs) {
    logger.error('S3 Upload: Missing base64Data, originalFormat, or activityId');
    return null;
  }
  try {
    const imageBuffer = decodeBase64Image(base64Data);
    // UUID suffix keeps repeated uploads for the same activity from colliding.
    const objectKey = `${PUBLIC_URL_FILE_PREFIX}/activity-${activityId}-${uuidv4()}.${originalFormat}`;
    // Using Bun's S3Client file API
    await s3Client.file(objectKey).write(imageBuffer, {
      type: `image/${originalFormat}`,
      acl: 'public-read'
    });
    const publicUrl = constructS3Url(objectKey);
    logger.info(`Image uploaded to S3: ${publicUrl}`);
    return publicUrl;
  } catch (error) {
    logger.error(`S3 Upload Error for activity ${activityId}:`, error);
    return null;
  }
}
/**
 * Lists all object keys in the S3 bucket under a specific prefix,
 * paginating with `startAfter` until the listing is exhausted.
 *
 * Fix: the previous loop-safety check compared `result.contents?.length === 0`,
 * which is FALSE when `contents` is undefined. A page with no `contents`
 * but `isTruncated` still true would never advance `startAfter` and spin
 * forever. We now break whenever a page yields no entries, since no
 * forward progress is possible in that case.
 * @param prefix - The prefix to filter objects by
 * @returns A list of object keys (empty on error)
 */
export async function listS3Objects(prefix: string): Promise<string[]> {
  if (!s3Client) {
    logger.warn('S3 client not configured. Cannot list objects.');
    return [];
  }
  logger.debug(`Listing objects from S3 with prefix: "${prefix}"`);
  try {
    const objectKeys: string[] = [];
    let isTruncated = true;
    let startAfter: string | undefined;
    while (isTruncated) {
      // Use Bun's list method with pagination
      const result = await s3Client.list({
        prefix,
        startAfter,
        maxKeys: 1000
      });
      const contents = result.contents ?? [];
      // No entries means no way to advance the cursor — stop even if
      // isTruncated claims more data (prevents an infinite loop).
      if (contents.length === 0) {
        break;
      }
      for (const item of contents) {
        // Skip "directory" placeholder keys that end with a slash.
        if (item.key && !item.key.endsWith('/')) {
          objectKeys.push(item.key);
        }
      }
      // Resume the next page after the last key seen.
      startAfter = contents[contents.length - 1]?.key;
      isTruncated = result.isTruncated || false;
    }
    logger.info(`Listed ${objectKeys.length} object keys from S3 with prefix "${prefix}"`);
    return objectKeys;
  } catch (error) {
    logger.error(`S3 ListObjects Error with prefix "${prefix}":`, error);
    return [];
  }
}
/**
 * Deletes the given object keys from S3. Bun's S3Client has no bulk-delete
 * call, so deletions run in parallel batches of 100. Individual failures
 * are logged and counted rather than aborting the run.
 * @param objectKeysArray - Array of object keys to delete
 * @returns True only when every deletion succeeded (also true for an
 *          empty input); false on any failure or when S3 is unconfigured
 */
export async function deleteS3Objects(objectKeysArray: string[]): Promise<boolean> {
  if (!s3Client) {
    logger.warn('S3 client not configured. Cannot delete objects.');
    return false;
  }
  if (!objectKeysArray || objectKeysArray.length === 0) {
    logger.info('No objects to delete from S3.');
    return true;
  }
  const BATCH_SIZE = 100;
  let successCount = 0;
  let errorCount = 0;
  try {
    for (let offset = 0; offset < objectKeysArray.length; offset += BATCH_SIZE) {
      const batch = objectKeysArray.slice(offset, offset + BATCH_SIZE);
      // Fire the whole batch at once and tally outcomes afterwards.
      const outcomes = await Promise.allSettled(
        batch.map(key => s3Client!.delete(key))
      );
      for (const outcome of outcomes) {
        if (outcome.status === 'rejected') {
          errorCount++;
          logger.error(`Failed to delete object: ${outcome.reason}`);
        } else {
          successCount++;
        }
      }
    }
    logger.info(`Deleted ${successCount} objects from S3. Failed: ${errorCount}`);
    return errorCount === 0; // True if all succeeded
  } catch (error) {
    logger.error('S3 DeleteObjects Error:', error);
    return false;
  }
}
/**
 * Builds the public URL for an S3 object key as
 * `<endpoint>/<bucket>/<key>`, trimming redundant slashes at each join.
 * @param objectKey - The key of the object in S3
 * @returns The full public URL, or '' when endpoint/bucket are unset
 */
export function constructS3Url(objectKey: string): string {
  if (!S3_ENDPOINT || !BUCKET_NAME) {
    return '';
  }
  const segments = [
    S3_ENDPOINT.replace(/\/$/, ''),                    // drop trailing slash
    BUCKET_NAME.replace(/^\//, '').replace(/\/$/, ''), // drop surrounding slashes
    objectKey.replace(/^\//, '')                       // drop leading slash
  ];
  return segments.join('/');
}