Upload files to "/"

This commit is contained in:
JoshBaneyCS 2025-04-29 01:20:30 +00:00
parent 12c735eeff
commit a7f5e6fcac
5 changed files with 102 additions and 0 deletions

24
.env Normal file
View File

@ -0,0 +1,24 @@
# Server port
PORT=3000
WEATHER_API_KEY=openweathermap_api_key
ZIP_CODE=00000
# MariaDB connection
DB_CLIENT=mysql
DB_HOST=0.0.0.0
DB_PORT=3307
DB_USER=joshbaney
DB_PASSWORD=Ran0dal5!
DB_NAME=heatmap
#AWS s3
S3_BUCKET_URL=https://s3.amazonaws.com/bwi2temps/trends
# Slack & AWS creds — SECURITY(review): real credentials are committed here.
# Rotate these immediately and load them from a secrets manager or CI-injected
# environment instead of version control.
SLACK_WEBHOOK_URL=https://hooks.slack.com/triggers/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
AWS_ACCESS_KEY_ID=AKIAQ3EGSIYOYP4L37HH
AWS_SECRET_ACCESS_KEY=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
AWS_REGION=us-east-2
S3_BUCKET_NAME=bwi2temps
S3_BASE_URL=https://s3.amazonaws.com/bwi2temps/

16
Dockerfile Normal file
View File

@ -0,0 +1,16 @@
# Dockerfile
FROM node:18-alpine
# Create app directory
WORKDIR /usr/src/app
# Install dependencies
COPY package*.json ./
RUN npm install --production
# Bundle app source
COPY . .
# Expose port and run
EXPOSE 3000
CMD ["node", "server.js"]

14
docker-compose.yaml Normal file
View File

@ -0,0 +1,14 @@
version: '3.8'
services:
fuego-app:
build:
context: .
dockerfile: Dockerfile
extra_hosts:
- "host.docker.internal:host-gateway"
env_file:
- .env
ports:
- "3000:3000"
restart: unless-stopped

18
package.json Normal file
View File

@ -0,0 +1,18 @@
{
  "name": "amazon-fuego",
"version": "1.0.0",
"main": "server.js",
"scripts": {
"start": "node server.js"
},
"dependencies": {
"@aws-sdk/client-s3": "^3.300.0",
"axios": "^1.4.0",
"body-parser": "^1.20.2",
"csv-stringify": "^6.0.8",
"dotenv": "^16.3.1",
"express": "^4.18.2",
"mysql2": "^3.3.3",
"sqlite3": "^5.1.6"
}
}

30
s3.js Normal file
View File

@ -0,0 +1,30 @@
// s3.js
require('dotenv').config();
const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3');
const { stringify } = require('csv-stringify/sync');
const s3 = new S3Client({
region: process.env.AWS_REGION
});
/**
* Uploads a CSV to s3://<bucket>/trends/<key>.csv with public-read ACL.
* @param {string} key Date key like '20250423'
* @param {Array<Object>} rows Array of DB rows to stringify into CSV
* @returns {string} Public URL of the uploaded CSV
*/
async function uploadTrendsCsv(key, rows) {
// Convert rows to CSV string (includes header)
const csv = stringify(rows, { header: true });
const cmd = new PutObjectCommand({
Bucket: process.env.S3_BUCKET_NAME,
Key: `trends/${key}.csv`,
Body: csv,
ContentType: 'text/csv',
ACL: 'public-read'
});
await s3.send(cmd);
return `${process.env.S3_BASE_URL}/${key}.csv`;
}
module.exports = { uploadTrendsCsv };