diff --git a/.github/workflows/nextjs.yml b/.github/workflows/nextjs.yml
index 0200770..3db671a 100644
--- a/.github/workflows/nextjs.yml
+++ b/.github/workflows/nextjs.yml
@@ -1,8 +1,5 @@
-# Sample workflow for building and deploying a Next.js site to GitHub Pages
-#
-# To get started with Next.js see: https://nextjs.org/docs/getting-started
-#
-name: Deploy Next.js site to Pages
+# Workflow for building and deploying a Next.js site to AWS S3
+name: Deploy Next.js site to S3
 
 on:
   # Runs on pushes targeting the default branch
@@ -12,82 +9,61 @@ on:
   # Allows you to run this workflow manually from the Actions tab
   workflow_dispatch:
 
-# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
-permissions:
-  contents: read
-  pages: write
-  id-token: write
-
 # Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
-# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
 concurrency:
-  group: "pages"
+  group: "s3-deployment"
   cancel-in-progress: false
 
 jobs:
-  # Build job
-  build:
+  build-and-deploy:
     runs-on: ubuntu-latest
+    permissions:
+      id-token: write
+      contents: read
     steps:
       - name: Checkout
         uses: actions/checkout@v4
-      - name: Detect package manager
-        id: detect-package-manager
-        run: |
-          if [ -f "${{ github.workspace }}/yarn.lock" ]; then
-            echo "manager=yarn" >> $GITHUB_OUTPUT
-            echo "command=install" >> $GITHUB_OUTPUT
-            echo "runner=yarn" >> $GITHUB_OUTPUT
-            exit 0
-          elif [ -f "${{ github.workspace }}/package.json" ]; then
-            echo "manager=npm" >> $GITHUB_OUTPUT
-            echo "command=ci" >> $GITHUB_OUTPUT
-            echo "runner=npx --no-install" >> $GITHUB_OUTPUT
-            exit 0
-          else
-            echo "Unable to determine package manager"
-            exit 1
-          fi
-      - name: Setup Node
+
+      - name: Setup Node.js
         uses: actions/setup-node@v4
         with:
           node-version: "20"
-          cache: ${{ steps.detect-package-manager.outputs.manager }}
-      - name: Setup Pages
-        uses: actions/configure-pages@v5
-        with:
-          # Automatically inject basePath in your Next.js configuration file and disable
-          # server side image optimization (https://nextjs.org/docs/api-reference/next/image#unoptimized).
-          #
-          # You may remove this line if you want to manage the configuration yourself.
-          static_site_generator: next
-      - name: Restore cache
+          cache: 'npm'
+
+      - name: Create .env.local file
+        run: |
+          echo "YOUTUBE_API_KEY=${{ secrets.YOUTUBE_API_KEY }}" > .env.local
+          echo "S3_BUCKET_NAME=${{ secrets.S3_BUCKET_NAME }}" >> .env.local
+
+      - name: Install dependencies
+        run: npm ci
+
+      - name: Restore Next.js cache
         uses: actions/cache@v4
         with:
           path: |
             .next/cache
-          # Generate a new cache whenever packages or source files change.
-          key: ${{ runner.os }}-nextjs-${{ hashFiles('**/package-lock.json', '**/yarn.lock') }}-${{ hashFiles('**.[jt]s', '**.[jt]sx') }}
-          # If source files changed but packages didn't, rebuild from a prior cache.
+          key: ${{ runner.os }}-nextjs-${{ hashFiles('**/package-lock.json') }}-${{ hashFiles('**.[jt]s', '**.[jt]sx') }}
           restore-keys: |
-            ${{ runner.os }}-nextjs-${{ hashFiles('**/package-lock.json', '**/yarn.lock') }}-
-      - name: Install dependencies
-        run: ${{ steps.detect-package-manager.outputs.manager }} ${{ steps.detect-package-manager.outputs.command }}
-      - name: Build with Next.js
-        run: ${{ steps.detect-package-manager.outputs.runner }} next build
-      - name: Upload artifact
-        uses: actions/upload-pages-artifact@v3
+            ${{ runner.os }}-nextjs-${{ hashFiles('**/package-lock.json') }}-
+
+      - name: Build Next.js site
+        env:
+          YOUTUBE_API_KEY: ${{ secrets.YOUTUBE_API_KEY }}
+          S3_BUCKET_NAME: ${{ secrets.S3_BUCKET_NAME }}
+        run: npm run build:static
+
+      - name: Configure AWS credentials
+        uses: aws-actions/configure-aws-credentials@v4
         with:
-          path: ./out
-
-  # Deployment job
-  # deploy:
-  #   environment:
-  #     name: github-pages
-  #     url: ${{ steps.deployment.outputs.page_url }}
-  #   runs-on: ubuntu-latest
-  #   needs: build
-  #   steps:
-  #     - name: Deploy to GitHub Pages
-  #       id: deployment
-  #       uses: actions/deploy-pages@v4
+          role-to-assume: arn:aws:iam::499518182498:role/cheatingchelsea-github-deployment
+          aws-region: us-east-2
+
+      - name: Deploy to S3
+        run: |
+          aws s3 sync out/ s3://${{ secrets.S3_BUCKET_NAME }} --delete --no-cli-pager
+
+      - name: Output deployment URL
+        run: |
+          echo "šŸŽ‰ Deployment successful!"
+          echo "Your site is available at: http://${{ secrets.S3_BUCKET_NAME }}.s3-website-us-east-2.amazonaws.com"
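Note on the "Configure AWS credentials" step: it authenticates through GitHub's OIDC provider (which is why the job requests the id-token: write permission) rather than long-lived access keys, and it assumes the cheatingchelsea-github-deployment role already exists in account 499518182498. That role is defined outside this repo; as a rough sketch only (the repo:<owner>/<repo>:* subject is a placeholder, not taken from this diff), its trust policy would look something like:

{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Principal": {
        "Federated": "arn:aws:iam::499518182498:oidc-provider/token.actions.githubusercontent.com"
      },
      "Action": "sts:AssumeRoleWithWebIdentity",
      "Condition": {
        "StringEquals": { "token.actions.githubusercontent.com:aud": "sts.amazonaws.com" },
        "StringLike": { "token.actions.githubusercontent.com:sub": "repo:<owner>/<repo>:*" }
      }
    }
  ]
}

Beyond the trust policy, the role needs S3 permissions for the sync itself (at minimum s3:ListBucket on the bucket and s3:PutObject/s3:DeleteObject on its objects), since the "Deploy to S3" step runs aws s3 sync --delete.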
diff --git a/next.config.ts b/next.config.ts
index 920d1b7..f107103 100644
--- a/next.config.ts
+++ b/next.config.ts
@@ -2,6 +2,9 @@ import type {NextConfig} from 'next';
 
 const nextConfig: NextConfig = {
   /* config options here */
+  output: 'export',
+  trailingSlash: true,
+  skipTrailingSlashRedirect: true,
   typescript: {
     ignoreBuildErrors: true,
   },
diff --git a/package.json b/package.json
index 8f28947..4b28cb6 100644
--- a/package.json
+++ b/package.json
@@ -7,9 +7,12 @@
     "genkit:dev": "genkit start -- tsx src/ai/dev.ts",
     "genkit:watch": "genkit start -- tsx --watch src/ai/dev.ts",
     "build": "next build",
+    "build:static": "next build",
     "start": "next start",
     "lint": "next lint",
-    "typecheck": "tsc --noEmit"
+    "typecheck": "tsc --noEmit",
+    "deploy:s3": "npm run build:static && aws s3 sync out/ s3://$S3_BUCKET_NAME --delete --no-cli-pager",
+    "create-s3-bucket": "node scripts/create-s3-bucket.js"
   },
   "dependencies": {
     "@genkit-ai/googleai": "^1.13.0",
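Because next.config.ts now sets output: 'export', a plain next build writes the fully static site to out/, so build:static is simply a named alias for build; the deploy:s3 script here and the workflow's "Deploy to S3" step both upload that same out/ directory with aws s3 sync. trailingSlash: true makes each route export as its own directory with an index.html, which maps cleanly onto S3 static website hosting.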
diff --git a/scripts/create-s3-bucket.js b/scripts/create-s3-bucket.js
new file mode 100755
index 0000000..7db5b75
--- /dev/null
+++ b/scripts/create-s3-bucket.js
@@ -0,0 +1,118 @@
+#!/usr/bin/env node
+
+const { execSync } = require('child_process');
+const readline = require('readline');
+
+const rl = readline.createInterface({
+  input: process.stdin,
+  output: process.stdout
+});
+
+function askQuestion(question) {
+  return new Promise((resolve) => {
+    rl.question(question, (answer) => {
+      resolve(answer);
+    });
+  });
+}
+
+async function main() {
+  try {
+    console.log('šŸš€ Setting up S3 bucket for static website hosting\n');
+
+    // Get bucket name
+    const bucketName = await askQuestion('Enter your S3 bucket name (must be globally unique): ');
+
+    if (!bucketName) {
+      console.error('āŒ Bucket name is required');
+      process.exit(1);
+    }
+
+    // Get AWS region
+    const region = await askQuestion('Enter AWS region (default: us-east-1): ') || 'us-east-1';
+
+    console.log(`\nšŸ“¦ Creating S3 bucket: ${bucketName} in region: ${region}`);
+
+    // Create bucket
+    try {
+      if (region === 'us-east-1') {
+        execSync(`aws s3 mb s3://${bucketName} --no-cli-pager`, { stdio: 'inherit' });
+      } else {
+        execSync(`aws s3 mb s3://${bucketName} --region ${region} --no-cli-pager`, { stdio: 'inherit' });
+      }
+      console.log('āœ… Bucket created successfully');
+    } catch (error) {
+      console.error('āŒ Failed to create bucket. It might already exist or you might not have permissions.');
+      console.error('Error:', error.message);
+    }
+
+    // Enable static website hosting
+    console.log('\n🌐 Enabling static website hosting...');
+    try {
+      execSync(`aws s3 website s3://${bucketName} --index-document index.html --error-document error.html --no-cli-pager`, { stdio: 'inherit' });
+      console.log('āœ… Static website hosting enabled');
+    } catch (error) {
+      console.error('āŒ Failed to enable static website hosting');
+      console.error('Error:', error.message);
+    }
+
+    // Disable block public access first; otherwise the public-read bucket policy below is rejected
+    console.log('\nšŸ” Configuring public access settings...');
+    try {
+      execSync(`aws s3api put-public-access-block --bucket ${bucketName} --public-access-block-configuration "BlockPublicAcls=false,IgnorePublicAcls=false,BlockPublicPolicy=false,RestrictPublicBuckets=false" --no-cli-pager`, { stdio: 'inherit' });
+      console.log('āœ… Public access configured');
+    } catch (error) {
+      console.error('āŒ Failed to configure public access');
+      console.error('Error:', error.message);
+    }
+
+    // Create bucket policy for public read access
+    const bucketPolicy = {
+      "Version": "2012-10-17",
+      "Statement": [
+        {
+          "Sid": "PublicReadGetObject",
+          "Effect": "Allow",
+          "Principal": "*",
+          "Action": "s3:GetObject",
+          "Resource": `arn:aws:s3:::${bucketName}/*`
+        }
+      ]
+    };
+
+    // Write policy to temporary file
+    require('fs').writeFileSync('/tmp/bucket-policy.json', JSON.stringify(bucketPolicy, null, 2));
+
+    console.log('\nšŸ”“ Setting bucket policy for public read access...');
+    try {
+      execSync(`aws s3api put-bucket-policy --bucket ${bucketName} --policy file:///tmp/bucket-policy.json --no-cli-pager`, { stdio: 'inherit' });
+      console.log('āœ… Bucket policy applied');
+    } catch (error) {
+      console.error('āŒ Failed to apply bucket policy');
+      console.error('Error:', error.message);
+    }
+
+    // Get website URL
+    const websiteUrl = `http://${bucketName}.s3-website-${region}.amazonaws.com`;
+
+    console.log(`\nšŸŽ‰ Setup complete!`);
+    console.log(`\nšŸ“‹ Next steps:`);
+    console.log(`1. Set your bucket name as an environment variable:`);
+    console.log(`   export S3_BUCKET_NAME=${bucketName}`);
+    console.log(`\n2. Deploy your site:`);
+    console.log(`   npm run deploy:s3`);
+    console.log(`\n3. Your website will be available at:`);
+    console.log(`   ${websiteUrl}`);
+    console.log(`\nšŸ’” Pro tip: Add S3_BUCKET_NAME=${bucketName} to your .env.local file`);
+
+    // Clean up
+    require('fs').unlinkSync('/tmp/bucket-policy.json');
+
+  } catch (error) {
+    console.error('āŒ An error occurred:', error.message);
+  } finally {
+    rl.close();
+  }
+}
+
+main();
diff --git a/src/app/robots.ts b/src/app/robots.ts
index 6e08cf5..6e4e74f 100644
--- a/src/app/robots.ts
+++ b/src/app/robots.ts
@@ -1,5 +1,7 @@
 import type { MetadataRoute } from 'next';
 
+export const dynamic = 'force-static';
+
 export default function robots(): MetadataRoute.Robots {
   const baseUrl = 'https://cheatingchelsea.com';
   return {
diff --git a/src/app/sitemap.ts b/src/app/sitemap.ts
index 8699df5..80c4288 100644
--- a/src/app/sitemap.ts
+++ b/src/app/sitemap.ts
@@ -1,5 +1,7 @@
 import type { MetadataRoute } from 'next';
 
+export const dynamic = 'force-static';
+
 export default function sitemap(): MetadataRoute.Sitemap {
   const baseUrl = 'https://cheatingchelsea.com';
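With output: 'export', every route must be renderable at build time, so the robots.ts and sitemap.ts metadata routes opt in explicitly via export const dynamic = 'force-static'; the export then emits them as static robots.txt and sitemap.xml files in out/ alongside the rest of the site.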