Upload files to "/"

JoshBaneyCS 2025-04-29 01:15:41 +00:00
parent 44113c0ca4
commit cb2bec6fd4
5 changed files with 321 additions and 0 deletions

Dockerfile (new file, 16 lines)

@@ -0,0 +1,16 @@
# Dockerfile
FROM node:18-alpine
# Create app directory
WORKDIR /usr/src/app
# Install dependencies
COPY package*.json ./
RUN npm install --production
# Bundle app source
COPY . .
# Expose port and run
EXPOSE 3000
CMD ["node", "server.js"]

docker-compose.yaml (new file, 14 lines)

@@ -0,0 +1,14 @@
version: '3.8'
services:
  fuego-app:
    build:
      context: .
      dockerfile: Dockerfile
    extra_hosts:
      - "host.docker.internal:host-gateway"
    env_file:
      - .env
    ports:
      - "3000:3000"
    restart: unless-stopped
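
The compose file pulls its configuration from a .env file that is not part of this commit. Going by the variables read in s3.js and server.js, a minimal sketch might look like the following; every value is a placeholder, and DB_HOST=host.docker.internal is only a guess based on the extra_hosts entry above (it assumes the MariaDB instance runs on the Docker host).

# .env (placeholder values; not included in this commit)
PORT=3000
DB_HOST=host.docker.internal
DB_PORT=3306
DB_USER=fuego
DB_PASSWORD=changeme
DB_NAME=fuego
AWS_REGION=us-east-1
S3_BUCKET_NAME=example-bucket
# s3.js appends /<key>.csv, so this should point at the prefix the CSVs land under
S3_BASE_URL=https://example-bucket.s3.amazonaws.com/trends
SLACK_WEBHOOK_URL=https://hooks.slack.com/services/T000/B000/XXXX
# AWS credentials (AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY) must also be available
# to the container unless the SDK can resolve them some other way.

With the .env in place, docker compose up -d --build builds the image from the Dockerfile above and exposes the service on port 3000.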

package.json (new file, 18 lines)

@@ -0,0 +1,18 @@
{
  "name": "Amazon-Fuego",
  "version": "1.0.0",
  "main": "server.js",
  "scripts": {
    "start": "node server.js"
  },
  "dependencies": {
    "@aws-sdk/client-s3": "^3.300.0",
    "axios": "^1.4.0",
    "body-parser": "^1.20.2",
    "csv-stringify": "^6.0.8",
    "dotenv": "^16.3.1",
    "express": "^4.18.2",
    "mysql2": "^3.3.3",
    "sqlite3": "^5.1.6"
  }
}
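
Since package.json defines a single start script, the service can also be run outside Docker with npm install followed by npm start, which launches the same server.js entry point used by the Dockerfile's CMD (the .env file still has to be present for dotenv to pick up).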

s3.js (new file, 30 lines)

@@ -0,0 +1,30 @@
// s3.js
require('dotenv').config();
const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3');
const { stringify } = require('csv-stringify/sync');
const s3 = new S3Client({
  region: process.env.AWS_REGION
});
/**
 * Uploads a CSV to s3://<bucket>/trends/<key>.csv with public-read ACL.
 * @param {string} key Date key like '20250423'
 * @param {Array<Object>} rows Array of DB rows to stringify into CSV
 * @returns {string} Public URL of the uploaded CSV
 */
async function uploadTrendsCsv(key, rows) {
  // Convert rows to CSV string (includes header)
  const csv = stringify(rows, { header: true });
  const cmd = new PutObjectCommand({
    Bucket: process.env.S3_BUCKET_NAME,
    Key: `trends/${key}.csv`,
    Body: csv,
    ContentType: 'text/csv',
    ACL: 'public-read'
  });
  await s3.send(cmd);
  return `${process.env.S3_BASE_URL}/${key}.csv`;
}
module.exports = { uploadTrendsCsv };
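
For reference, a minimal, hypothetical caller for the uploader above (not part of this commit); it assumes AWS_REGION, S3_BUCKET_NAME and S3_BASE_URL are set and that the rows match the readings schema created in server.js.

// try-s3.js — standalone sketch for exercising uploadTrendsCsv
require('dotenv').config();
const { uploadTrendsCsv } = require('./s3');

(async () => {
  const rows = [
    { id: 1, dockDoor: 101, direction: 'inbound', timestamp: '2025-04-23 08:15:00',
      temperature: 88.2, humidity: 61, heatIndex: 95.1 },
  ];
  // Uploads to s3://<bucket>/trends/20250423.csv and logs the resulting URL
  const url = await uploadTrendsCsv('20250423', rows);
  console.log('CSV uploaded to', url);
})().catch(console.error);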

server.js (new file, 243 lines)

@@ -0,0 +1,243 @@
// server.js
require('dotenv').config();
const express = require('express');
const mysql = require('mysql2/promise');
const bodyParser = require('body-parser');
const path = require('path');
const axios = require('axios');
const { uploadTrendsCsv } = require('./s3');
const app = express();
const PORT = process.env.PORT || 3000;
// In-memory shift counters
const shiftCounters = {};
// Helper: format a Date in EST as SQL DATETIME string
function formatDateEST(date) {
  const pad = n => n.toString().padStart(2, '0');
  return `${date.getFullYear()}-${pad(date.getMonth()+1)}-${pad(date.getDate())} ` +
         `${pad(date.getHours())}:${pad(date.getMinutes())}:${pad(date.getSeconds())}`;
}
// Helper: format a Date in EST as ISO-like string (no “Z”)
function isoStringEST(date) {
  const pad = n => n.toString().padStart(2, '0');
  return `${date.getFullYear()}-${pad(date.getMonth()+1)}-${pad(date.getDate())}` +
         `T${pad(date.getHours())}:${pad(date.getMinutes())}:${pad(date.getSeconds())}`;
}
// Compute heat index (NOAA formula)
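// Inputs: T in °F, R in % relative humidity (Rothfusz regression);
// e.g. computeHeatIndex(90, 70) ≈ 105.92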
function computeHeatIndex(T, R) {
  const [c1,c2,c3,c4,c5,c6,c7,c8,c9] =
    [-42.379,2.04901523,10.14333127,-0.22475541,-0.00683783,
     -0.05481717,0.00122874,0.00085282,-0.00000199];
  const HI = c1 + c2*T + c3*R + c4*T*R
           + c5*T*T + c6*R*R + c7*T*T*R
           + c8*T*R*R + c9*T*T*R*R;
  return Math.round(HI * 100) / 100;
}
// Determine shift info in EST
function getShiftInfo(now) {
  const estNow = new Date(now.toLocaleString('en-US', { timeZone: 'America/New_York' }));
  const [h,m] = [estNow.getHours(), estNow.getMinutes()];
  let shift, shiftStart = new Date(estNow);
  if (h > 7 || (h === 7 && m >= 0)) {
    if (h < 17 || (h === 17 && m < 30)) {
      shift = 'Day';
      shiftStart.setHours(7,0,0,0);
    } else {
      shift = 'Night';
      shiftStart.setHours(17,30,0,0);
    }
  } else {
    shift = 'Night';
    shiftStart.setDate(shiftStart.getDate() - 1);
    shiftStart.setHours(17,30,0,0);
  }
  const key = `${shift}-${shiftStart.toISOString().slice(0,10)}-` +
              `${shiftStart.getHours()}${shiftStart.getMinutes()}`;
  return { shift, shiftStart, key, estNow };
}
// MariaDB connection pool
const pool = mysql.createPool({
  host: process.env.DB_HOST,
  port: parseInt(process.env.DB_PORT, 10) || 3306,
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME,
  waitForConnections: true,
  connectionLimit: 10,
  queueLimit: 0,
  connectTimeout: 10000,
  enableKeepAlive: true,
  keepAliveInitialDelay: 10000,
});
// Ensure readings table exists
(async () => {
  const createSQL = `
    CREATE TABLE IF NOT EXISTS readings (
      id INT AUTO_INCREMENT PRIMARY KEY,
      dockDoor INT NOT NULL,
      direction VARCHAR(10) NOT NULL,
      timestamp DATETIME NOT NULL,
      temperature DOUBLE,
      humidity DOUBLE,
      heatIndex DOUBLE
    );
  `;
  await pool.execute(createSQL);
})();
// SSE setup
let clients = [];
app.get('/api/stream', (req, res) => {
  res.set({
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    Connection: 'keep-alive'
  });
  res.flushHeaders();
  clients.push(res);
  req.on('close', () => { clients = clients.filter(c => c !== res); });
});
function broadcast(event, data) {
  const msg = `event: ${event}\ndata: ${JSON.stringify(data)}\n\n`;
  clients.forEach(c => c.write(msg));
}
// Middleware & static files
app.use(bodyParser.json());
app.use(express.static(path.join(__dirname, 'public')));
// Dual-reading POST endpoint
app.post('/api/readings', async (req, res) => {
  try {
    console.log('🔔 POST /api/readings body:', req.body);
    const { inbound = {}, outbound = {} } = req.body;
    const { dockDoor: inDoor, temperature: inTemp, humidity: inHum } = inbound;
    const { dockDoor: outDoor, temperature: outTemp, humidity: outHum } = outbound;
    // Validate inputs
    if ([inDoor, inTemp, inHum, outDoor, outTemp, outHum].some(v => v === undefined)) {
      return res.status(400).json({ error: 'Missing one of inbound/outbound fields' });
    }
    // Compute heat indices
    const hiIn = computeHeatIndex(inTemp, inHum);
    const hiOut = computeHeatIndex(outTemp, outHum);
    // Shift & period logic
    const { shift, shiftStart, key, estNow } = getShiftInfo(new Date());
    shiftCounters[key] = (shiftCounters[key] || 0) + 1;
    const period = shiftCounters[key];
    // Prepare EST timestamps
    const sqlTimestamp = formatDateEST(estNow);
    const broadcastTimestamp = isoStringEST(estNow);
    // Insert readings into DB
    const insertSQL = `
      INSERT INTO readings
        (dockDoor, direction, timestamp, temperature, humidity, heatIndex)
      VALUES (?, ?, ?, ?, ?, ?)
    `;
    await pool.execute(insertSQL, [inDoor, 'inbound', sqlTimestamp, inTemp, inHum, hiIn]);
    await pool.execute(insertSQL, [outDoor, 'outbound', sqlTimestamp, outTemp, outHum, hiOut]);
    // Broadcast via SSE (EST)
    broadcast('new-reading', {
      dockDoor: inDoor,
      direction: 'inbound',
      timestamp: broadcastTimestamp,
      temperature: inTemp,
      humidity: inHum,
      heatIndex: hiIn
    });
    broadcast('new-reading', {
      dockDoor: outDoor,
      direction: 'outbound',
      timestamp: broadcastTimestamp,
      temperature: outTemp,
      humidity: outHum,
      heatIndex: hiOut
    });
    // Generate and upload today's CSV, get URL
    const dateKey = `${estNow.getFullYear()}${String(estNow.getMonth()+1).padStart(2,'0')}${String(estNow.getDate()).padStart(2,'0')}`;
    const [rows] = await pool.execute(
      `SELECT * FROM readings
       WHERE DATE(timestamp) = CURDATE()
       ORDER BY timestamp ASC`
    );
    let csvUrl = null;
    try {
      csvUrl = await uploadTrendsCsv(dateKey, rows);
    } catch (uploadErr) {
      console.error('Failed to upload CSV to S3:', uploadErr);
    }
    // Flat JSON Slack payload with CSV URL
    const slackPayload = {
      text: 'New temperature readings recorded',
      inbound_dockDoor: inDoor,
      inbound_temperature: inTemp,
      inbound_humidity: inHum,
      hiIn,
      outbound_dockDoor: outDoor,
      outbound_temperature: outTemp,
      outbound_humidity: outHum,
      hiOut,
      shift,
      period,
      timestamp: broadcastTimestamp,
      csvUrl // include the CSV download URL
    };
    console.log('🛠️ Slack payload:', slackPayload);
    await axios.post(process.env.SLACK_WEBHOOK_URL, slackPayload);
    res.json({ success: true, shift, period, csvUrl });
  } catch (err) {
    console.error('❌ POST /api/readings error:', err);
    res.status(500).json({ error: err.message });
  }
});
// Return all readings
app.get('/api/readings', async (req, res) => {
  try {
    const [rows] = await pool.execute(`SELECT * FROM readings ORDER BY timestamp ASC`);
    res.json(rows);
  } catch (err) {
    console.error('❌ GET /api/readings error:', err);
    res.status(500).json({ error: err.message });
  }
});
// Export CSV of all readings
app.get('/api/export', async (req, res) => {
  try {
    const [rows] = await pool.execute(`SELECT * FROM readings ORDER BY timestamp ASC`);
    res.setHeader('Content-disposition', 'attachment; filename=readings.csv');
    res.set('Content-Type', 'text/csv');
    res.write('id,dockDoor,direction,timestamp,temperature,humidity,heatIndex\n');
    rows.forEach(r => {
      const ts = (r.timestamp instanceof Date) ? formatDateEST(r.timestamp) : r.timestamp;
      res.write(`${r.id},${r.dockDoor},${r.direction},${ts},${r.temperature},${r.humidity},${r.heatIndex}\n`);
    });
    res.end();
  } catch (err) {
    console.error('❌ GET /api/export error:', err);
    res.status(500).send(err.message);
  }
});
// Start server
app.listen(PORT, () => {
  console.log(`Server running on http://localhost:${PORT}`);
});
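
Two client-side sketches for reference only; neither file is part of this commit, and the file names are made up. The first posts a pair of readings in the exact shape the /api/readings handler destructures; the second subscribes to the new-reading events emitted over the /api/stream SSE endpoint.

// post-reading.js — hypothetical sender for POST /api/readings
const axios = require('axios');

axios.post('http://localhost:3000/api/readings', {
  inbound:  { dockDoor: 101, temperature: 88.2, humidity: 61 },
  outbound: { dockDoor: 214, temperature: 91.5, humidity: 55 }
})
  .then(res => console.log(res.data))   // { success, shift, period, csvUrl }
  .catch(err => console.error(err.response ? err.response.data : err.message));

// public/stream-client.js — hypothetical browser consumer of the SSE stream
const es = new EventSource('/api/stream');
es.addEventListener('new-reading', e => {
  const r = JSON.parse(e.data);
  console.log(`${r.direction} door ${r.dockDoor}: ${r.temperature}°F, HI ${r.heatIndex}`);
});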