.gitea/workflows/deploy.yaml (new file, 41 lines)
@@ -0,0 +1,41 @@
name: Deploy to Server

on:
  push:
    branches:
      - main

jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - name: Install SSH Key
        run: |
          mkdir -p ~/.ssh
          echo "${{ secrets.DEPLOY_KEY }}" > ~/.ssh/id_ed25519
          chmod 600 ~/.ssh/id_ed25519
          ssh-keyscan 192.168.30.114 >> ~/.ssh/known_hosts

      - name: Deploy via SSH
        run: |
          ssh deployuser@192.168.30.114 << 'EOF'
          set -euo pipefail
          APP_DIR=/opt/Train-ID
          SERVICE=train-id
          if [ ! -d "$APP_DIR" ]; then
            sudo mkdir -p "$APP_DIR"
            sudo chown "$USER":"$USER" "$APP_DIR"
            git clone https://git.hudsonriggs.systems/HRiggs/Train-ID.git "$APP_DIR"
          fi
          cd "$APP_DIR"
          git pull origin main
          # Install Node.js deps and build
          npm ci || npm install
          npm run build
          # Ensure systemd service exists and restart
          # (use the full unit name so a scoped sudoers rule matches exactly)
          if systemctl list-unit-files | grep -q "${SERVICE}.service"; then
            sudo systemctl restart "${SERVICE}.service"
          else
            echo "Warning: ${SERVICE}.service not found; start your process manager manually."
          fi
          EOF
Binary file not shown.

README.md (138 lines)
@@ -1,2 +1,140 @@
# Train-ID

Simple Node/Express app to identify and inventory model trains from photos using OpenAI Vision, store results in MySQL, and export PDFs/XLSX.

## Requirements
- Node.js 20+ and npm
- MySQL 8+ (or MariaDB; the Debian install below uses MariaDB)
- OpenAI API key

## Environment variables
Create a `.env` in the project root on the server:

```
PORT=3000
# OpenAI
OPENAI_API_KEY=sk-...
# or alternative variable name also supported by the code:
# openapi_key=sk-...

# MySQL connection
db_ip=127.0.0.1
db_port=3306
db_user=trainid
db_pass=changeme
db_name=trainid
```
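
The comment above hints that the code accepts either `OPENAI_API_KEY` or `openapi_key`. A minimal sketch of that fallback (hypothetical module name; the repo's actual config code may differ):

```
// config.ts: hypothetical sketch of the dual env-var lookup.
import 'dotenv/config';

export const openaiApiKey: string =
  process.env.OPENAI_API_KEY ?? process.env.openapi_key ?? '';

if (!openaiApiKey) {
  throw new Error('Set OPENAI_API_KEY (or openapi_key) in .env');
}
```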

## Local development
```
npm install
npm run build
npm start
# or for auto-reload on TypeScript build changes:
npm run dev
```
Visit `http://localhost:3000`.

## API overview
- POST `/api/upload` with `multipart/form-data` field `image` → analyzes photo, inserts record
- GET `/api/items` → list items (optional `?q=` to search model/SKU)
- GET `/api/items/:id/pdf` → generate/download PDF for one item
- GET `/api/export/xlsx` → download XLSX export of inventory with embedded thumbnails
- DELETE `/api/items/:id` → delete one
- DELETE `/api/items` → wipe all
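
For a quick smoke test of the upload and list endpoints, Node 20's built-in `fetch`/`FormData`/`Blob` are enough. This client sketch is hypothetical (file name, base URL, and query string are assumptions):

```
// smoke-test.ts: exercise POST /api/upload and GET /api/items.
import { readFile } from 'fs/promises';

const base = 'http://localhost:3000'; // assumed local server

async function main(photo: string) {
  // POST /api/upload with multipart field "image"
  const form = new FormData();
  form.append('image', new Blob([await readFile(photo)]), 'photo.jpg');
  const up = await fetch(`${base}/api/upload`, { method: 'POST', body: form });
  console.log('upload:', await up.json());

  // GET /api/items, optionally filtered by model/SKU
  const list = await fetch(`${base}/api/items?q=lionel`);
  console.log('items:', await list.json());
}

main(process.argv[2] ?? 'photo.jpg').catch(console.error);
```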

## Debian 13 (Trixie) LXC install
These steps assume a fresh Debian 13 LXC and deployment directory `/opt/Train-ID` with a system user `deployuser` that has passwordless sudo for service management.

1) Base packages and Node.js
```
sudo apt update
sudo apt install -y curl ca-certificates gnupg build-essential pkg-config
# Node 20 via NodeSource or the Debian repo (choose one). NodeSource example:
curl -fsSL https://deb.nodesource.com/setup_20.x | sudo -E bash -
sudo apt install -y nodejs
node -v && npm -v
```

2) MySQL server (or connect to external MySQL)
```
sudo apt install -y mariadb-server mariadb-client
sudo systemctl enable --now mariadb
sudo mysql -e "CREATE DATABASE IF NOT EXISTS trainid CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;"
sudo mysql -e "CREATE USER IF NOT EXISTS 'trainid'@'%' IDENTIFIED BY 'changeme';"
sudo mysql -e "GRANT ALL PRIVILEGES ON trainid.* TO 'trainid'@'%'; FLUSH PRIVILEGES;"
```
If using external MySQL, skip install and set `db_ip` etc. in `.env`.
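
For reference, these `db_*` values line up with a mysql2 pool roughly as follows (a sketch, assuming the `mysql2` driver implied by `pool.execute`/`pool.query` in the source diffs; the repo's actual db module may differ):

```
// db-pool.ts: hypothetical wiring of the .env values into mysql2/promise.
import mysql from 'mysql2/promise';

export const pool = mysql.createPool({
  host: process.env.db_ip ?? '127.0.0.1',
  port: Number(process.env.db_port ?? 3306),
  user: process.env.db_user,
  password: process.env.db_pass,
  database: process.env.db_name,
  waitForConnections: true,
  connectionLimit: 10
});
```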

3) App checkout and build
```
sudo mkdir -p /opt/Train-ID
sudo chown $USER:$USER /opt/Train-ID
git clone https://git.hudsonriggs.systems/HRiggs/Train-ID.git /opt/Train-ID
cd /opt/Train-ID
cp .env.example .env || true  # if you keep a template in the future
# create .env as described above
npm ci || npm install
npm run build
```

4) Systemd service
Create `/etc/systemd/system/train-id.service`:

```
[Unit]
Description=Train-ID API
After=network.target

[Service]
Type=simple
WorkingDirectory=/opt/Train-ID
EnvironmentFile=/opt/Train-ID/.env
ExecStart=/usr/bin/node /opt/Train-ID/dist/server.js
Restart=always
RestartSec=5
User=www-data
Group=www-data

[Install]
WantedBy=multi-user.target
```

Then enable and start:

```
sudo systemctl daemon-reload
sudo systemctl enable --now train-id
sudo systemctl status train-id --no-pager
```

5) Reverse proxy (optional, for port 80/443)
Example Nginx site (`/etc/nginx/sites-available/train-id`):

```
server {
    listen 80;
    server_name _;
    location / {
        proxy_pass http://127.0.0.1:3000;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }
}
```

Enable and reload:

```
sudo apt install -y nginx
sudo ln -s /etc/nginx/sites-available/train-id /etc/nginx/sites-enabled/train-id
sudo nginx -t && sudo systemctl reload nginx
```
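
One caveat about the proxy headers above: if the app ever relies on `req.ip` or `req.protocol`, Express must be told to trust the nginx hop. Whether this repo already does so isn't shown here, so treat this as an optional sketch:

```
// Only needed if client IP/protocol matter behind nginx.
import express from 'express';

const app = express();
app.set('trust proxy', 1); // trust exactly one proxy hop
```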

## CI/CD deploy
The repo includes `.gitea/workflows/deploy.yaml` which, on push to `main`, SSHes to `192.168.30.114`, pulls the latest code, installs dependencies, builds, and restarts the `train-id` service. Set `DEPLOY_KEY` in repository secrets to a private SSH key authorized for `deployuser@192.168.30.114`.

If your service name or directory differs, update `SERVICE`/`APP_DIR` in the workflow accordingly.

To let the workflow restart the service without a password prompt, grant `deployuser` a scoped sudoers rule and validate it:

```
echo "deployuser ALL=(root) NOPASSWD: /bin/systemctl restart train-id.service" | sudo tee /etc/sudoers.d/train-id >/dev/null
sudo chmod 440 /etc/sudoers.d/train-id
sudo visudo -cf /etc/sudoers.d/train-id
```

@@ -22,13 +22,20 @@ async function ensureSchema() {
       description text not null,
       item_condition varchar(50) not null,
       has_box boolean not null,
-      image_path text not null,
+      image_path text null,
+      image_data longblob null,
+      image_mime varchar(100) null,
       created_at timestamp default current_timestamp
     ) engine=InnoDB;
   `;
   await pool.query(sql);
   // backfill for older tables without sku
   await pool.query(`alter table items add column if not exists sku varchar(64) null` as any);
+  // backfill nullable image_path
+  await pool.query(`alter table items modify column image_path text null` as any);
+  // add image_data and image_mime if missing
+  await pool.query(`alter table items add column if not exists image_data longblob null` as any);
+  await pool.query(`alter table items add column if not exists image_mime varchar(100) null` as any);
 }

 let schemaEnsured: Promise<void> | null = null;
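
One portability note on the backfills above: `alter table ... add column if not exists` is MariaDB syntax, and stock MySQL 8 rejects it. Since the README lists MySQL 8+ as an option, a guarded variant would be needed there. A hypothetical helper (not in this commit, reusing the module's `pool`):

```
// Hypothetical MySQL-compatible replacement for ADD COLUMN IF NOT EXISTS.
async function addColumnIfMissing(table: string, column: string, ddl: string) {
  const [rows] = await pool.query(
    `select 1 from information_schema.columns
     where table_schema = database() and table_name = ? and column_name = ?`,
    [table, column]
  );
  if ((rows as any[]).length === 0) {
    await pool.query(`alter table \`${table}\` add column \`${column}\` ${ddl}`);
  }
}

// e.g. await addColumnIfMissing('items', 'image_data', 'longblob null');
```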

@@ -41,9 +48,20 @@ export const db = {
   async insertItem(item: Omit<TrainItem, 'id'>) {
     await getReady();
     const [result] = await pool.execute(
-      `insert into items (manufacturer, model, sku, quantity, description, item_condition, has_box, image_path)
-       values (?,?,?,?,?,?,?,?)`,
-      [item.manufacturer, item.model, item.sku || null, item.quantity, item.description, item.condition, item.hasBox, item.imagePath]
+      `insert into items (manufacturer, model, sku, quantity, description, item_condition, has_box, image_path, image_data, image_mime)
+       values (?,?,?,?,?,?,?,?,?,?)`,
+      [
+        item.manufacturer,
+        item.model,
+        item.sku || null,
+        item.quantity,
+        item.description,
+        item.condition,
+        item.hasBox,
+        item.imagePath || null,
+        (item as any).imageData || null,
+        (item as any).imageMime || null
+      ]
     );
     const insertId = (result as any).insertId as number;
     const [rows] = await pool.query(

@@ -79,6 +97,27 @@ export const db = {
     if (row) { row.condition = row.cond; delete row.cond; }
     return row;
   },
+  async getItemWithImage(id: number) {
+    await getReady();
+    const [rows] = await pool.query(
+      `select id, manufacturer, model, sku, quantity, description, item_condition as cond, has_box as hasBox, image_path as imagePath, image_data as imageData, image_mime as imageMime, created_at as createdAt
+       from items where id = ?`,
+      [id]
+    );
+    const row = (rows as any[])[0] || null;
+    if (row) { row.condition = row.cond; delete row.cond; }
+    return row;
+  },
+  async listItemsWithImage() {
+    await getReady();
+    const [rows] = await pool.query(
+      `select id, manufacturer, model, sku, quantity, description, item_condition as cond, has_box as hasBox, image_path as imagePath, image_data as imageData, image_mime as imageMime, created_at as createdAt
+       from items order by id asc`
+    );
+    const arr = rows as any[];
+    for (const r of arr) { r.condition = r.cond; delete r.cond; }
+    return arr;
+  },
   async deleteItem(id: number) {
     await getReady();
     const [res] = await pool.query('delete from items where id = ?', [id]);
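
With image bytes now living in the database, serving them over HTTP would need a route this commit doesn't add. A hypothetical sketch built on the new `getItemWithImage` (assumes the surrounding router/db module):

```
// Hypothetical route, not part of this commit.
router.get('/items/:id/image', async (req, res) => {
  const item = await db.getItemWithImage(Number(req.params.id));
  if (!item || !item.imageData) return res.status(404).json({ error: 'Not found' });
  res.setHeader('Content-Type', item.imageMime || 'application/octet-stream');
  res.send(Buffer.from(item.imageData));
});
```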

@@ -21,6 +21,10 @@ router.post('/upload', async (req, res) => {

   const analysis = await analyzeImageToMetadata(savePath);

+  const imageBuffer = await fs.readFile(savePath);
+  const imageMime: string = file.mimetype || 'application/octet-stream';
+
+  // Insert with image bytes and mime; do not persist path
   const saved = await db.insertItem({
     manufacturer: analysis.manufacturer,
     model: analysis.model,

@@ -29,8 +33,13 @@ router.post('/upload', async (req, res) => {
     description: analysis.description,
     condition: analysis.condition,
     hasBox: analysis.hasBox,
-    imagePath: savePath,
-  });
+    imagePath: null as any,
+    imageData: imageBuffer as any,
+    imageMime: imageMime as any
+  } as any);

+  // Remove uploaded file after DB storage
+  try { await fs.unlink(savePath); } catch {}
+
   res.json({ item: saved });
 } catch (err: any) {
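
Note that the route above calls `await fs.readFile(...)` and `await fs.unlink(...)`, which assumes the promise-based fs API. The import isn't visible in this hunk, but it would have to be along these lines:

```
// Assumed import shape for the fs calls above (not shown in the diff).
import { promises as fs } from 'fs';
```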

@@ -53,7 +62,7 @@ router.get('/items', async (req, res) => {
 router.get('/items/:id/pdf', async (req, res) => {
   try {
     const id = Number(req.params.id);
-    const item = await db.getItem(id);
+    const item = await db.getItemWithImage(id);
     if (!item) return res.status(404).json({ error: 'Not found' });
     const pdfPath = await generatePdfForItem(item);
     res.setHeader('Content-Type', 'application/pdf');

@@ -79,11 +88,41 @@ router.get('/export/xlsx', async (_req, res) => {
     { header: 'Condition', key: 'condition', width: 14 },
     { header: 'Description', key: 'description', width: 50 },
     { header: 'Has Box', key: 'hasBox', width: 10 },
-    { header: 'Image Path', key: 'imagePath', width: 50 },
+    { header: 'Image', key: 'image', width: 25 },
     { header: 'Created', key: 'createdAt', width: 24 }
   ];
-  const items = await db.listItems();
-  for (const it of items) ws.addRow(it);
+  const items = await db.listItemsWithImage();
+  let rowIndex = 2; // 1-based, row 1 is header
+  for (const it of items) {
+    // Add row without imagePath, we have an Image column placeholder
+    const row = ws.addRow({
+      id: it.id,
+      manufacturer: it.manufacturer,
+      model: it.model,
+      sku: it.sku,
+      quantity: it.quantity,
+      condition: it.condition,
+      description: it.description,
+      hasBox: it.hasBox ? 'Yes' : 'No',
+      image: '',
+      createdAt: it.createdAt
+    });
+    // Adjust row height for image display
+    row.height = 90;
+    // Embed image if available
+    const buf = (it as any).imageData as unknown as Uint8Array | null;
+    const mime: string | null = (it as any).imageMime || null;
+    if (buf && mime && (mime.includes('png') || mime.includes('jpeg') || mime.includes('jpg'))) {
+      const imgId = wb.addImage({ buffer: Buffer.from(buf as any) as any, extension: mime.includes('png') ? 'png' : 'jpeg' });
+      // Place image in the Image column (index 9, zero-based col 8) anchored to this row
+      const colIndexZeroBased = 8; // 0-based; column I (Image) since A=0
+      ws.addImage(imgId, {
+        tl: { col: colIndexZeroBased, row: rowIndex - 1 },
+        ext: { width: 120, height: 80 }
+      });
+    }
+    rowIndex++;
+  }
   res.setHeader('Content-Type', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet');
   res.setHeader('Content-Disposition', 'attachment; filename="train_inventory.xlsx"');
   await wb.xlsx.write(res);
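
To sanity-check the export, the workbook can be read back and its embedded images counted (a sketch assuming exceljs' `Worksheet#getImages`, available in recent versions):

```
// verify-xlsx.ts: load the export and count embedded thumbnails.
import ExcelJS from 'exceljs';

async function main(path: string) {
  const wb = new ExcelJS.Workbook();
  await wb.xlsx.readFile(path);
  const ws = wb.worksheets[0];
  console.log(`rows: ${ws.rowCount}, images: ${ws.getImages().length}`);
}

main('train_inventory.xlsx').catch(console.error);
```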

@@ -11,7 +11,10 @@ export type TrainItem = {
   description: string; // 1 sentence
   condition: string; // e.g., New, Excellent, Good, Fair, Poor
   hasBox: boolean;
-  imagePath: string;
+  // Prefer in-DB storage; path retained for legacy/backfill
+  imagePath?: string;
+  imageData?: Buffer;
+  imageMime?: string;
   id?: number;
 };

@@ -39,7 +39,12 @@ export async function generatePdfForItem(item: TrainItem & { id: number }): Prom
   try {
     doc.moveDown();
     doc.text('Photo:', { continued: false });
-    doc.image(item.imagePath, { width: 300 });
+    const imageBuffer: Buffer | undefined = (item as any).imageData;
+    if (imageBuffer && imageBuffer.length > 0) {
+      doc.image(imageBuffer, { width: 300 });
+    } else if (item.imagePath) {
+      doc.image(item.imagePath, { width: 300 });
+    }
   } catch {}

   doc.end();