feat: redis cache and offload images to S3

Author: JamesFlare1212
Date: 2025-05-09 19:43:01 -04:00
Parent: 99d1ee0a1e
Commit: f7252345f3
14 changed files with 2302 additions and 202 deletions

main.mjs (new file, +289 lines)

@@ -0,0 +1,289 @@
// main.mjs
import express from 'express';
import dotenv from 'dotenv';
import cors from 'cors';
import { fetchActivityData } from './engage-api/get-activity.mjs';
import { structActivityData } from './engage-api/struct-activity.mjs';
import { structStaffData } from './engage-api/struct-staff.mjs';
import {
    getActivityData,
    setActivityData,
    getStaffData,
    setStaffData,
    getRedisClient,
    getAllActivityKeys, // used by /v1/activity/list to enumerate cached activities
    ACTIVITY_KEY_PREFIX // used to strip the key prefix when listing activities
} from './services/redis-service.mjs';
import { uploadImageFromBase64 } from './services/s3-service.mjs';
import { extractBase64Image } from './utils/image-processor.mjs';
import {
initializeClubCache,
updateStaleClubs,
initializeOrUpdateStaffCache,
cleanupOrphanedS3Images
} from './services/cache-manager.mjs';
import { logger } from './utils/logger.mjs';
dotenv.config();
const USERNAME = process.env.API_USERNAME;
const PASSWORD = process.env.API_PASSWORD;
const PORT = process.env.PORT || 3000;
const FIXED_STAFF_ACTIVITY_ID = process.env.FIXED_STAFF_ACTIVITY_ID;
const allowedOriginsEnv = process.env.ALLOWED_ORIGINS || '*';
const CLUB_CHECK_INTERVAL_SECONDS = parseInt(process.env.CLUB_CHECK_INTERVAL_SECONDS || '300', 10);
const STAFF_CHECK_INTERVAL_SECONDS = parseInt(process.env.STAFF_CHECK_INTERVAL_SECONDS || '300', 10);
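// Example .env values (hypothetical, shown only to illustrate the variables read above):
//   API_USERNAME=engage-api-user
//   API_PASSWORD=********
//   PORT=3000
//   FIXED_STAFF_ACTIVITY_ID=1234
//   ALLOWED_ORIGINS=https://cca.example.org,https://www.example.org
//   CLUB_CHECK_INTERVAL_SECONDS=300
//   STAFF_CHECK_INTERVAL_SECONDS=300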
let corsOptions;
if (allowedOriginsEnv === '*') {
    corsOptions = { origin: '*' };
} else {
    const originsArray = allowedOriginsEnv.split(',').map(origin => origin.trim());
    corsOptions = {
        origin: function (origin, callback) {
            if (!origin || originsArray.indexOf(origin) !== -1 || originsArray.includes('*')) {
                callback(null, true);
            } else {
                callback(new Error('Not allowed by CORS'));
            }
        }
    };
}
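// Example (hypothetical values): with ALLOWED_ORIGINS="https://cca.example.org,https://admin.example.org",
// a browser request from https://other.example.com is rejected with the CORS error above, while
// requests without an Origin header (curl, server-to-server) are always allowed.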
const app = express();
app.use(cors(corsOptions));
app.use(express.json());
// Helper for API calls: fetch activity data, structure it, offload any base64 photo to S3, and cache the result
async function fetchProcessAndStoreActivity(activityId) {
    logger.info(`API call: Cache miss or forced fetch for activity ID: ${activityId}.`);
    const activityJson = await fetchActivityData(activityId, USERNAME, PASSWORD);
    if (!activityJson) {
        logger.warn(`API call: No data from engage API for activity ${activityId}. Caching as empty.`);
        const emptyData = { lastCheck: new Date().toISOString(), source: 'api-fetch-empty' };
        await setActivityData(activityId, emptyData);
        return { data: emptyData, status: 404 };
    }
    let structuredActivity = await structActivityData(activityJson);
    if (structuredActivity && structuredActivity.photo && structuredActivity.photo.startsWith('data:image')) {
        const imageInfo = extractBase64Image(structuredActivity.photo);
        if (imageInfo) {
            const s3Url = await uploadImageFromBase64(imageInfo.base64Content, imageInfo.format, activityId);
            if (s3Url) {
                structuredActivity.photo = s3Url;
            } else {
                logger.warn(`API call: Failed S3 upload for activity ${activityId}. Photo may be base64 or null.`);
            }
        }
    }
    structuredActivity.lastCheck = new Date().toISOString();
    await setActivityData(activityId, structuredActivity);
    return { data: structuredActivity, status: 200 };
}
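// For reference, a cached entry written by the helper above looks roughly like this
// (illustrative shape; most fields come from structActivityData and are not enumerated here):
//   { "id": 123, "name": "Chess Club", "photo": "https://<bucket>/.../123.png", "lastCheck": "2025-05-09T23:43:01.000Z", ... }
// and an activity the engage API returned nothing for is cached as:
//   { "lastCheck": "...", "source": "api-fetch-empty" }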
// --- API Endpoints ---
app.get('/', (req, res) => {
    res.send('Welcome to the DSAS CCA API!<br/><br/>' +
        'API Endpoints:<br/>' +
        'GET /v1/activity/list<br/>' +
        'GET /v1/activity/:activityId (ID must be 1-4 digits)<br/>' +
        'GET /v1/staffs');
});
// NEW ENDPOINT: /v1/activity/list
app.get('/v1/activity/list', async (req, res) => {
    try {
        logger.info('Request received for /v1/activity/list');
        const activityKeys = await getAllActivityKeys(); // From redis-service.mjs
        const clubList = {};
        if (!activityKeys || activityKeys.length === 0) {
            logger.info('No activity keys found in Redis for list.');
            return res.json({}); // Return empty object if no keys
        }
        // Fetch all activity data in parallel.
        // Note: this issues one GET per key. For a very large number of keys this could be slow,
        // but Redis is fast and Promise.all keeps the requests concurrent.
        const allActivityDataPromises = activityKeys.map(async (key) => {
            const activityId = key.substring(ACTIVITY_KEY_PREFIX.length);
            return getActivityData(activityId);
        });
        const allActivities = await Promise.all(allActivityDataPromises);
        allActivities.forEach(activityData => {
            // A valid club entry must:
            // 1. exist,
            // 2. have both an 'id' and a non-empty 'name',
            // 3. not be an error placeholder (no 'error' field),
            // 4. not be an intentionally empty record (source !== 'api-fetch-empty').
            if (activityData &&
                activityData.id &&
                activityData.name &&
                !activityData.error &&
                activityData.source !== 'api-fetch-empty') {
                clubList[activityData.id] = activityData.name;
            }
        });
        logger.info(`Returning list of ${Object.keys(clubList).length} valid clubs.`);
        res.json(clubList);
    } catch (error) {
        logger.error('Error in /v1/activity/list endpoint:', error);
        res.status(500).json({ error: 'An internal server error occurred while generating activity list.' });
    }
});
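// Possible alternative (a sketch, not used above): read all cached activities in one round trip
// with MGET instead of N individual GETs. This assumes the cached values are plain JSON strings
// stored under the ACTIVITY_KEY_PREFIX keys, matching how redis-service.mjs is used here.
//
//   const raw = await getRedisClient().mGet(activityKeys);
//   const allActivities = raw.map(value => (value ? JSON.parse(value) : null));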
app.get('/v1/activity/:activityId', async (req, res) => {
    let { activityId } = req.params;
    if (!/^\d{1,4}$/.test(activityId)) {
        return res.status(400).json({ error: 'Invalid Activity ID format.' });
    }
    if (!USERNAME || !PASSWORD) {
        logger.error('API username or password not configured.');
        return res.status(500).json({ error: 'Server configuration error.' });
    }
    try {
        let cachedActivity = await getActivityData(activityId);
        const isValidCacheEntry = cachedActivity &&
            !cachedActivity.error &&
            Object.keys(cachedActivity).filter(k => k !== 'lastCheck' && k !== 'cache' && k !== 'source').length > 0;
        if (isValidCacheEntry) {
            logger.info(`Cache HIT for activity ID: ${activityId}`);
            cachedActivity.cache = "HIT";
            return res.json(cachedActivity);
        }
        logger.info(`Cache MISS or stale/empty for activity ID: ${activityId}. Fetching...`);
        const { data: liveActivity, status } = await fetchProcessAndStoreActivity(activityId);
        liveActivity.cache = "MISS";
        if (status === 404 && Object.keys(liveActivity).filter(k => k !== 'lastCheck' && k !== 'cache' && k !== 'source').length === 0) {
            return res.status(404).json({ error: `Activity ${activityId} not found.`, ...liveActivity });
        }
        res.status(status).json(liveActivity);
    } catch (error) {
        logger.error(`Error in /v1/activity/${activityId} endpoint:`, error);
        res.status(500).json({ error: 'An internal server error occurred.', cache: "ERROR" });
    }
});
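// Example requests (hypothetical data; shapes follow the handler above):
//   GET /v1/activity/42    -> 200 { "id": 42, "name": "...", "photo": "https://...", "cache": "HIT", ... }
//   GET /v1/activity/12345 -> 400 { "error": "Invalid Activity ID format." }   (more than 4 digits)
//   GET /v1/activity/999   -> 404 { "error": "Activity 999 not found.", ... }  (engage API returned no data)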
app.get('/v1/staffs', async (req, res) => {
    if (!USERNAME || !PASSWORD) {
        logger.error('API username or password not configured.');
        return res.status(500).json({ error: 'Server configuration error.' });
    }
    try {
        let cachedStaffs = await getStaffData();
        if (cachedStaffs && cachedStaffs.lastCheck) {
            logger.info('Cache HIT for staffs.');
            cachedStaffs.cache = "HIT";
            return res.json(cachedStaffs);
        }
        logger.info('Cache MISS for staffs. Fetching from source.');
        const activityJson = await fetchActivityData(FIXED_STAFF_ACTIVITY_ID, USERNAME, PASSWORD);
        if (activityJson) {
            const staffMap = await structStaffData(activityJson);
            let staffObject = Object.fromEntries(staffMap);
            staffObject.lastCheck = new Date().toISOString();
            staffObject.cache = "MISS";
            await setStaffData(staffObject);
            res.json(staffObject);
        } else {
            logger.error(`Could not retrieve base data for staffs (activity ID ${FIXED_STAFF_ACTIVITY_ID}).`);
            res.status(404).json({ error: `Could not retrieve base data for staff details.`, cache: "MISS" });
        }
    } catch (error) {
        logger.error('Error in /v1/staffs endpoint:', error);
        res.status(500).json({ error: 'An internal server error occurred while fetching staff data.', cache: "ERROR" });
    }
});
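// The staff response is the Map returned by structStaffData flattened into a plain object via
// Object.fromEntries, plus the lastCheck and cache fields added above, e.g. (shape illustrative):
//   GET /v1/staffs -> 200 { ...staff entries..., "lastCheck": "2025-05-09T23:43:01.000Z", "cache": "MISS" }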
// Function to perform background initialization and periodic tasks
async function performBackgroundTasks() {
    logger.info('Starting background initialization tasks...');
    try {
        await initializeClubCache();
        await initializeOrUpdateStaffCache(true);
        await cleanupOrphanedS3Images();
        logger.info(`Setting up periodic club cache updates every ${CLUB_CHECK_INTERVAL_SECONDS} seconds.`);
        setInterval(updateStaleClubs, CLUB_CHECK_INTERVAL_SECONDS * 1000);
        logger.info(`Setting up periodic staff cache updates every ${STAFF_CHECK_INTERVAL_SECONDS} seconds.`);
        setInterval(() => initializeOrUpdateStaffCache(false), STAFF_CHECK_INTERVAL_SECONDS * 1000);
        logger.info('Background initialization and periodic task setup complete.');
    } catch (error) {
        logger.error('Error during background initialization tasks:', error);
    }
}
// --- Start Server and Background Tasks ---
async function startServer() {
    const redis = getRedisClient();
    if (!redis) {
        logger.error('Redis client is not initialized. Server cannot start. Check REDIS_URL.');
        process.exit(1);
    }
    try {
        await redis.ping();
        logger.info('Redis connection confirmed.');
        app.listen(PORT, () => {
            logger.info(`Server is running on http://localhost:${PORT}`);
            logger.info(`Allowed CORS origins: ${allowedOriginsEnv === '*' ? 'All (*)' : allowedOriginsEnv}`);
            if (!USERNAME || !PASSWORD) {
                logger.warn('Warning: API_USERNAME or API_PASSWORD is not set.');
            }
        });
        performBackgroundTasks().catch(error => {
            logger.error('Unhandled error in performBackgroundTasks:', error);
        });
    } catch (err) {
        logger.error('Failed to connect to Redis or critical error during server startup. Server not started.', err);
        process.exit(1);
    }
}
if (process.env.NODE_ENV !== 'test') {
    startServer();
}
process.on('SIGINT', async () => {
    logger.info('Server shutting down (SIGINT)...');
    const redis = getRedisClient();
    if (redis) {
        await redis.quit();
        logger.info('Redis connection closed.');
    }
    process.exit(0);
});
process.on('SIGTERM', async () => {
    logger.info('Server shutting down (SIGTERM)...');
    const redis = getRedisClient();
    if (redis) {
        await redis.quit();
        logger.info('Redis connection closed.');
    }
    process.exit(0);
});
export { app };
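// Note: the named export above lets tests import the Express app without binding a port
// (startServer is skipped when NODE_ENV === 'test'). A minimal sketch, assuming supertest
// is available as a dev dependency (it is not part of this commit):
//   import request from 'supertest';
//   import { app } from './main.mjs';
//   const res = await request(app).get('/v1/activity/list'); // exercises the list route in-process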