add docker stuff

Gabriel Brown 2025-01-13 16:48:53 -06:00
parent e2c65425af
commit ddc9bb0cfa
12 changed files with 286 additions and 100 deletions


@@ -1,17 +0,0 @@
# Since the ".env" file is gitignored, you can use the ".env.example" file to
# build a new ".env" file when you clone the repo. Keep this file up-to-date
# when you add new variables to `.env`.
# This file will be committed to version control, so make sure not to have any
# secrets in it. If you are cloning this repo, create a copy of this file named
# ".env" and populate it with your secrets.
# When adding additional environment variables, the schema in "/src/env.js"
# should be updated accordingly.
# Drizzle
DATABASE_URL="postgresql://postgres:password@localhost:5432/wavelength_server"
# Example:
# SERVERVAR="foo"
# NEXT_PUBLIC_CLIENTVAR="bar"
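Note that the Dockerfiles below copy a local `.env` into the image (`COPY .env .env.production`), so even though `.env.example` is removed here, a `.env` file still has to exist at the repository root before building. A minimal sketch, using the placeholder connection string from the removed example:

```sh
# create .env at the repo root before running docker build / docker compose
cat > .env <<'EOF'
DATABASE_URL="postgresql://postgres:password@localhost:5432/wavelength_server"
EOF
```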


@@ -0,0 +1,53 @@
# syntax=docker.io/docker/dockerfile:1
FROM node:18-alpine AS base
# 1. Install dependencies only when needed
FROM base AS deps
# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
RUN apk add --no-cache libc6-compat
WORKDIR /app
# Install dependencies based on the preferred package manager
COPY package.json yarn.lock* package-lock.json* pnpm-lock.yaml* .npmrc* ./
RUN \
  if [ -f yarn.lock ]; then yarn --frozen-lockfile; \
  elif [ -f package-lock.json ]; then npm ci; \
  elif [ -f pnpm-lock.yaml ]; then corepack enable pnpm && pnpm i; \
  else echo "Lockfile not found." && exit 1; \
  fi
# 2. Rebuild the source code only when needed
FROM base AS builder
WORKDIR /app
COPY --from=deps /app/node_modules ./node_modules
COPY . .
# Bake the local .env into the image as .env.production, so the corresponding env file is used for this environment.
COPY .env .env.production
RUN npm run build
# 3. Production image, copy all the files and run next
FROM base AS runner
WORKDIR /app
ENV NODE_ENV=production
RUN addgroup -g 1001 -S nodejs
RUN adduser -S nextjs -u 1001
COPY --from=builder /app/public ./public
# Automatically leverage output traces to reduce image size
# https://nextjs.org/docs/advanced-features/output-file-tracing
COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static
USER nextjs
EXPOSE 3000
ENV PORT=3000
CMD HOSTNAME="0.0.0.0" node server.js
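This appears to be docker/development/Dockerfile (the compose file below builds from that path). As a rough sketch, the image can also be built and run directly from the repository root, mirroring the image name and port mapping the compose file uses:

```sh
# build the multi-stage image with the repository root as build context
docker build -f docker/development/Dockerfile -t with-docker-multi-env-development .

# run the standalone Next.js server; the container listens on 3000 (EXPOSE/PORT above)
docker run --rm -p 3002:3000 with-docker-multi-env-development
```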


@@ -0,0 +1,17 @@
services:
  wavelength_server:
    build:
      context: ../../
      dockerfile: docker/development/Dockerfile
    image: with-docker-multi-env-development
    container_name: wavelength_server
    networks:
      - node_apps
    ports:
      - "3002:3000"
    tty: true
    restart: unless-stopped

networks:
  node_apps:
    external: true
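Because `node_apps` is declared as an external network, it has to exist before the stack comes up. A minimal first-time setup, assuming the commands are run from the repository root (the compose path matches the one used by scripts/reload_container.sh below):

```sh
# one-time: create the shared network the compose file expects
docker network create node_apps

# build and start the development container in the background
docker compose -f docker/development/compose.yaml up -d --build

# tail the Next.js logs
docker compose -f docker/development/compose.yaml logs -f wavelength_server
```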


@@ -0,0 +1,54 @@
# syntax=docker.io/docker/dockerfile:1
FROM node:18-alpine AS base
# 1. Install dependencies only when needed
FROM base AS deps
# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
RUN apk add --no-cache libc6-compat
WORKDIR /app
# Install dependencies based on the preferred package manager
COPY package.json yarn.lock* package-lock.json* pnpm-lock.yaml* .npmrc* ./
RUN \
  if [ -f yarn.lock ]; then yarn --frozen-lockfile; \
  elif [ -f package-lock.json ]; then npm ci; \
  elif [ -f pnpm-lock.yaml ]; then corepack enable pnpm && pnpm i; \
  else echo "Lockfile not found." && exit 1; \
  fi
# 2. Rebuild the source code only when needed
FROM base AS builder
WORKDIR /app
COPY --from=deps /app/node_modules ./node_modules
COPY . .
# Bake the local .env into the image as .env.production, so the corresponding env file is used for this environment.
COPY .env .env.production
RUN npm run build
# 3. Production image, copy all the files and run next
FROM base AS runner
WORKDIR /app
ENV NODE_ENV=production
RUN addgroup -g 1001 -S nodejs
RUN adduser -S nextjs -u 1001
COPY --from=builder /app/public ./public
# Automatically leverage output traces to reduce image size
# https://nextjs.org/docs/advanced-features/output-file-tracing
COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static
USER nextjs
EXPOSE 3000
ENV PORT=3000
CMD HOSTNAME="0.0.0.0" node server.js
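This production Dockerfile is essentially identical to the development one; the differences live in the compose files. A quick smoke test of the built image might look like this (the tag, container name, and curl URL are assumptions, not part of the commit):

```sh
# build and run the production image, then check that the server answers
docker build -f docker/production/Dockerfile -t wavelength_server:prod .
docker run -d --name wavelength_prod -p 3000:3000 wavelength_server:prod
curl -I http://localhost:3000
docker rm -f wavelength_prod
```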


@@ -0,0 +1,16 @@
services:
  wavelength_server:
    build:
      context: ../../
      dockerfile: docker/production/Dockerfile
    image: with-docker-multi-env-production
    container_name: wavelength_server
    networks:
      - node_apps
    ports:
      - "3002:3000"
    tty: true
    restart: unless-stopped

networks:
  node_apps:
    external: true
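Bringing up the production stack follows the same pattern as development, just pointing at the production compose file; the `node_apps` network must already exist here as well. A sketch, assuming the repository root as the working directory:

```sh
docker compose -f docker/production/compose.yaml up -d --build
docker compose -f docker/production/compose.yaml ps
```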


@@ -2,11 +2,12 @@
* Run `build` or `dev` with `SKIP_ENV_VALIDATION` to skip env validation. This is especially useful
* for Docker builds.
*/
-await import("./src/env.js");
+import './src/env.js';
/** @type {import("next").NextConfig} */
const config = {
+  output: 'standalone',
};
export default config;
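The switch to `output: 'standalone'` is what makes the `COPY --from=builder /app/.next/standalone` steps in the Dockerfiles work: `next build` emits a self-contained `server.js` plus a pruned `node_modules`. The same output can be exercised locally; note that `public/` and the static assets are not copied into the standalone folder automatically, which is exactly what the runner stage does by hand. A local sketch:

```sh
npm run build

# mirror what the Dockerfile runner stage does
cp -r public .next/standalone/public
cp -r .next/static .next/standalone/.next/static

# start the standalone server
PORT=3000 node .next/standalone/server.js
```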


@@ -13,7 +13,7 @@
"lint": "next lint",
"start": "next start",
"go": "next dev",
"go:prod": "next build && next start"
"goprod": "next build && next start"
},
"dependencies": {
"@t3-oss/env-nextjs": "^0.10.1",

scripts/files_to_clipboard.py (new executable file, 105 lines)

@@ -0,0 +1,105 @@
#!/usr/bin/env python3

import os
import sys
import argparse
from pathlib import Path

# Third-party dependencies; fail with a helpful message if they are missing
try:
    import pyperclip
    import questionary
except ImportError as e:
    print(f"Error: Missing required module '{e.name}'.")
    print(f"Please install it by running: pip install {e.name}")
    sys.exit(1)

# List of directories to exclude
EXCLUDED_DIRS = {'node_modules', '.next', '.venv', '.git', '__pycache__', '.idea', '.vscode', 'ui'}


def collect_files(project_path):
    """
    Collects files from the project directory, excluding the specified directories.
    Returns a list of file paths relative to the project directory.
    """
    collected_files = []
    for root, dirs, files in os.walk(project_path):
        # Exclude specified directories
        dirs[:] = [d for d in dirs if d not in EXCLUDED_DIRS]
        for file in files:
            file_path = Path(root) / file
            relative_path = file_path.relative_to(project_path)
            collected_files.append(relative_path)
    return collected_files


def main():
    # Parse command-line arguments
    parser = argparse.ArgumentParser(description='Generate Markdown from selected files.')
    parser.add_argument('path', nargs='?', default='.', help='Path to the project directory')
    args = parser.parse_args()

    project_path = Path(args.path).resolve()
    if not project_path.is_dir():
        print(f"Error: '{project_path}' is not a directory.")
        sys.exit(1)

    # Collect files from the project directory
    file_list = collect_files(project_path)
    if not file_list:
        print("No files found in the project directory.")
        sys.exit(1)

    # Sort file_list for better organization
    file_list.sort()

    # Interactive file selection using questionary
    print("\nSelect the files you want to include:")
    selected_files = questionary.checkbox(
        "Press space to select files, and Enter when you're done:",
        choices=[str(f) for f in file_list]
    ).ask()
    if not selected_files:
        print("No files selected.")
        sys.exit(1)

    # Generate markdown
    markdown_lines = []
    markdown_lines.append('')
    for selected_file in selected_files:
        file_path = project_path / selected_file
        try:
            with open(file_path, 'r', encoding='utf-8') as f:
                content = f.read()
            # Determine the language for code block from file extension
            language = file_path.suffix.lstrip('.')
            markdown_lines.append(f'{selected_file}')
            markdown_lines.append(f'```{language}')
            markdown_lines.append(content)
            markdown_lines.append('```')
            markdown_lines.append('')
        except Exception as e:
            print(f"Error reading file {selected_file}: {e}")
    markdown_text = '\n'.join(markdown_lines)

    # Write markdown to file
    output_file = 'output.md'
    with open(output_file, 'w', encoding='utf-8') as f:
        f.write(markdown_text)
    print(f"\nMarkdown file '{output_file}' has been generated.")

    # Copy markdown content to clipboard
    pyperclip.copy(markdown_text)
    print("Markdown content has been copied to the clipboard.")


if __name__ == "__main__":
    main()
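A usage sketch for the script: install its two third-party dependencies, then point it at a project directory; it writes `output.md` and copies the same Markdown to the clipboard (on headless Linux, pyperclip may additionally need a clipboard backend such as xclip):

```sh
pip install questionary pyperclip

# run against the current repository (or pass another path)
./scripts/files_to_clipboard.py .
```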


@@ -0,0 +1,18 @@
/**
* Run `build` or `dev` with `SKIP_ENV_VALIDATION` to skip env validation. This is especially useful
* for Docker builds.
*/
import './src/env.js';
/** @type {import("next").NextConfig} */
const config = {
  output: 'standalone',
  typescript: {
    ignoreBuildErrors: true,
  },
  eslint: {
    ignoreDuringBuilds: true,
  },
};
export default config;

scripts/next.config.default.js (new executable file, 12 lines)

@@ -0,0 +1,12 @@
/**
* Run `build` or `dev` with `SKIP_ENV_VALIDATION` to skip env validation. This is especially useful
* for Docker builds.
*/
import './src/env.js';
/** @type {import("next").NextConfig} */
const config = {
  output: 'standalone',
};
export default config;

scripts/reload_container.sh (new executable file, 7 lines)

@@ -0,0 +1,7 @@
git pull
mv ~/Documents/Web/wavelength_server/next.config.js ~/Documents/Web/wavelength_server/scripts/next.config.default.js
cp ~/Documents/Web/wavelength_server/scripts/next.config.build.js ~/Documents/Web/wavelength_server/next.config.js
sudo docker compose -f docker/development/compose.yaml down
sudo docker compose -f docker/development/compose.yaml build
sudo docker compose -f docker/development/compose.yaml up -d
cp ~/Documents/Web/wavelength_server/scripts/next.config.default.js ~/Documents/Web/wavelength_server/next.config.js
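The script assumes the repository lives at ~/Documents/Web/wavelength_server: it stashes the current next.config.js as scripts/next.config.default.js, swaps in scripts/next.config.build.js (presumably the 18-line config above that ignores TypeScript and ESLint errors during builds), rebuilds and restarts the development stack, and then restores the default config. A sketch of invoking it:

```sh
chmod +x scripts/reload_container.sh
./scripts/reload_container.sh
```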


@@ -1,80 +0,0 @@
// WebSocket for client to receive messages
//import { Server } from 'socket.io';
//import { Client, Notification } from 'pg';
//import { NextResponse } from 'next/server';
//import type { NextRequest } from 'next/server';
//import type { Message } from '~/server/types';
//let isInitialized = false;
//const io = new Server();
//const pgClient = new Client({
//connectionString: process.env.DATABASE_URL,
//});
//const initializePostgres = async () => {
//if (isInitialized) return;
//try {
//await pgClient.connect();
//console.log('Connected to PostgreSQL');
//await pgClient.query('LISTEN new_message');
//pgClient.on('notification', (msg: Notification) => {
//try {
//const newMessage: Message = JSON.parse(msg.payload ?? '{}') as Message;
//const { receiverId, text, id } = newMessage;
//if (receiverId && typeof receiverId === 'number') {
//io.to(receiverId.toString()).emit('message', newMessage);
//console.log(`Message sent to room ${receiverId} with text: ${text}`);
//} else {
//console.error('Invalid receiverId:', receiverId);
//}
//} catch (error) {
//console.error('Error parsing notification payload:', error);
//}
//});
//isInitialized = true;
//} catch (error) {
//console.error('Error connecting to PostgreSQL:', error);
//throw error;
//}
//};
//io.on('connection', (socket) => {
//console.log('WebSocket client connected', socket.id);
//socket.on('join', async (userId: number) => {
//const roomId = userId.toString();
//await socket.join(roomId);
//console.log(`WebSocket client joined room ${userId}`);
//});
//socket.on('error', (error) => {
//console.error('WebSocket error:', error);
//});
//socket.on('disconnect', () => {
//console.log('WebSocket client disconnected');
//});
//});
//export const runtime = 'edge';
//export const GET = async (request: NextRequest) => {
//try {
//await initializePostgres();
//// @ts-expect-error: Socket.IO types conflict with Next.js Request
//const upgrade = await io.handleUpgrade(request);
//if (!upgrade || !upgrade.headers)
//throw new Error('Failed to upgrade connection');
//return new NextResponse(null, {
//status: 101,
//headers: upgrade.headers,
//});
//} catch (error) {
//console.error('Error handling upgrade:', error);
//return NextResponse.json({ message: 'Internal server error' }, { status: 500 });
//}
//};