Skip to content

Commit

Permalink
init
Browse files Browse the repository at this point in the history
  • Loading branch information
stevenbecht committed Nov 20, 2024
0 parents commit ce41698
Show file tree
Hide file tree
Showing 74 changed files with 10,326 additions and 0 deletions.
1 change: 1 addition & 0 deletions .env.local.example
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
OLLAMA_API_URL=http://10.0.0.40:11434/api/generate
3 changes: 3 additions & 0 deletions .eslintrc.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
{
"extends": ["next/core-web-vitals", "next/typescript"]
}
36 changes: 36 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.js
.yarn/install-state.gz

# testing
/coverage

# next.js
/.next/
/out/

# production
/build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# local env files
.env*.local

# vercel
.vercel

# typescript
*.tsbuildinfo
next-env.d.ts
21 changes: 21 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
# Development image: serves the app with `next dev` (hot reload),
# not an optimized production build.
FROM node:18-alpine

WORKDIR /app

# Copy package files
# Copied before the source tree so the dependency layers below are cached
# independently of application code changes.
COPY package*.json ./

# Install Next 14
# NOTE(review): next is installed explicitly here and then `npm install`
# runs again below — presumably to pin next@14.2.16 regardless of
# package.json; confirm this double install is intentional.
RUN npm install next@14.2.16

# Install dependencies
RUN npm install

# Copy the rest of the application
COPY . .

# Expose port 3000
EXPOSE 3000

# Run next dev instead of production build
CMD ["npx", "next", "dev"]
32 changes: 32 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
# Variables
DOCKER_REPO=docker.io/sbecht/geeko-insurance
TAG=latest

# Default target: build the image, then push it
.PHONY: all
all: build push

# Build the Docker image
.PHONY: build
build:
	docker build -t $(DOCKER_REPO):$(TAG) .

# Push the image to Docker Hub
.PHONY: push
push:
	docker push $(DOCKER_REPO):$(TAG)

# Build and push with a specific tag, e.g. `make release version=1.0.0`
.PHONY: release
release:
	@if [ "$(version)" = "" ]; then \
		echo "Please specify a version: make release version=1.0.0"; \
		exit 1; \
	fi
	docker build -t $(DOCKER_REPO):$(version) .
	docker push $(DOCKER_REPO):$(version)

# Clean up local Docker images (|| true: ignore failure if image is absent)
.PHONY: clean
clean:
	docker rmi $(DOCKER_REPO):$(TAG) || true
33 changes: 33 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
# Welcome to Geeko Insurance -- Work in Progress

* WARNING: THIS MIGHT NOT RUN OUT OF THE BOX YET *

# Right now there's a couple ways to run:

git clone https://github.com/sigsteve/geeko-insurance
cd geeko-insurance

# on host outside container
$ npm install
$ npx next dev

# this depends on Ollama setup somewhere
# you'll need a .env.local with an OLLAMA_API_URL set to be your Ollama host
cp .env.local.example .env.local
# edit .env.local and update accordingly

# connect browser to localhost:3000 (or whatever port it says at startup)

# on a local system w/docker
$ docker compose up
# connect browser to localhost:3000

# in k8s - pay attention to Ingress/Service/etc
$ kubectl apply -f k8s/...

# Login with user suse and password heygeeko

# We depend on Ollama having llava:34b and llama3.1:8b models
# Use ollama pull to download them - if you have a GPU with < 15GB VRAM
# then you can pull llava:7b and update the code to use that - slower
# and less accurate, but responsive ...
80 changes: 80 additions & 0 deletions app/api/analyze-damage/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
import { NextResponse } from 'next/server'

const MAX_RETRIES = 3;

/**
 * Sends one damage-itemization request to the Ollama vision model.
 *
 * @param base64Image - The vehicle photo, base64-encoded.
 * @param attempt - 1-based attempt number, used only for logging.
 * @returns The parsed list of damaged part names plus the raw model text.
 *          On a model error or missing response, returns an empty
 *          `damagedParts` array so the caller's retry loop fires.
 */
async function tryAnalyzeDamage(base64Image: string, attempt: number = 1): Promise<{
  damagedParts: string[],
  rawResponse: string
}> {
  console.log(`🔄 API Route: Damage analysis attempt ${attempt} of ${MAX_RETRIES}`)

  const llmPayload = {
    model: "llava:7b",
    prompt: "List the damaged parts of this vehicle. Return only a comma-separated list of specific parts. Use standard automotive part names (e.g., front bumper, hood, left fender). Do not include descriptions or additional text.",
    images: [base64Image],
    stream: false
  }

  const llmResponse = await fetch(process.env.OLLAMA_API_URL!, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
    },
    body: JSON.stringify(llmPayload)
  })

  const llmData = await llmResponse.json()
  console.log(`✨ API Route: Raw LLM Response (attempt ${attempt}):`, llmData)

  // Guard against a model error or absent response (mirrors tryCheckDamage in
  // check-damage/route.ts). Previously `llmData.response.split(...)` threw a
  // TypeError when `response` was undefined, which skipped the caller's retry
  // loop and surfaced as a 500 instead of a retry.
  if (llmData.error || typeof llmData.response !== 'string') {
    console.error('❌ API Route: LLM Error:', llmData.error ?? 'No response')
    return {
      damagedParts: [],
      rawResponse: llmData.error ? `Error: ${llmData.error}` : 'Error: No response'
    }
  }

  // The prompt requests a bare comma-separated list; split and drop blanks.
  const damagedParts = llmData.response
    .split(',')
    .map((part: string) => part.trim())
    .filter((part: string) => part.length > 0)

  return {
    damagedParts,
    rawResponse: llmData.response
  }
}

/**
 * POST /api/analyze-damage
 * Accepts a multipart form with an `image` file, base64-encodes it, and asks
 * the vision model to itemize damaged parts. Retries the model call up to
 * MAX_RETRIES times while no parts are returned.
 *
 * Responses: 200 with { damagedParts, rawResponse }, 400 when no file is
 * attached, 500 on unexpected failure.
 */
export async function POST(request: Request) {
  try {
    console.log('🚀 API Route: Starting damage analysis')

    const form = await request.formData()
    const image = form.get('image') as File

    if (!image) {
      console.error('❌ API Route: No image file provided')
      return NextResponse.json(
        { error: 'No image file provided' },
        { status: 400 }
      )
    }

    // Base64-encode the upload for the Ollama `images` field.
    const base64Image = Buffer.from(await image.arrayBuffer()).toString('base64')

    // First attempt, then retry while the model yields no usable parts list.
    let attempt = 1
    let result = await tryAnalyzeDamage(base64Image, attempt)
    while ((!result.damagedParts || result.damagedParts.length === 0) && attempt < MAX_RETRIES) {
      attempt += 1
      console.log(`⚠️ API Route: Invalid response, retrying (attempt ${attempt})`)
      result = await tryAnalyzeDamage(base64Image, attempt)
    }

    console.log('🎯 API Route: Damage analysis result:', result)
    return NextResponse.json(result)
  } catch (error) {
    console.error('💥 API Route: Error:', error)
    return NextResponse.json(
      { error: 'Failed to analyze damage' },
      { status: 500 }
    )
  }
}
13 changes: 13 additions & 0 deletions app/api/auth/check/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
import { NextResponse } from 'next/server'
import { cookies } from 'next/headers'

/**
 * GET /api/auth/check
 * Reports whether the `isAuthenticated` cookie is present on the request.
 * Responds 401 with { authenticated: false } when it is missing.
 */
export async function GET() {
  const authCookie = cookies().get('isAuthenticated')

  return authCookie
    ? NextResponse.json({ authenticated: true })
    : NextResponse.json({ authenticated: false }, { status: 401 })
}
22 changes: 22 additions & 0 deletions app/api/auth/login/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
import { NextResponse } from 'next/server'
import { cookies } from 'next/headers'

/**
 * POST /api/auth/login
 * Validates the submitted { username, password } and, on success, sets an
 * HTTP-only `isAuthenticated` cookie.
 *
 * NOTE(review): credentials are hardcoded (documented in the README) —
 * replace with a real user store before production use.
 */
export async function POST(request: Request) {
  const { username, password } = await request.json()

  // Guard clause: reject anything that isn't the expected credential pair.
  if (username !== 'suse' || password !== 'heygeeko') {
    return NextResponse.json({ success: false }, { status: 401 })
  }

  // Set HTTP-only cookie so client-side scripts cannot read the auth flag.
  const response = NextResponse.json({ success: true })
  response.cookies.set('isAuthenticated', 'true', {
    httpOnly: true,
    secure: process.env.NODE_ENV === 'production',
    sameSite: 'lax',
    path: '/',
  })

  return response
}
8 changes: 8 additions & 0 deletions app/api/auth/logout/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
import { NextResponse } from 'next/server'

export async function POST() {
const response = NextResponse.json({ success: true })
response.cookies.delete('isAuthenticated')

return response
}
96 changes: 96 additions & 0 deletions app/api/check-damage/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
import { NextResponse } from 'next/server'

const MAX_RETRIES = 3;

/**
 * Sends one yes/no damage-detection request to the Ollama vision model.
 *
 * @param base64Image - The vehicle photo, base64-encoded.
 * @param attempt - 1-based attempt number, used only for logging.
 * @returns Damage verdict, the raw model text, the model name, and a
 *          user-facing `error` message when the model reported a failure.
 */
async function tryCheckDamage(base64Image: string, attempt: number = 1): Promise<{
  hasDamage: boolean,
  rawResponse: string,
  model: string,
  error?: string
}> {
  console.log(`🔄 API Route: Damage check attempt ${attempt} of ${MAX_RETRIES}`)

  const payload = {
    model: "llava:7b",
    prompt: "Is there any visible damage to this vehicle? Answer only 'true' if damage is visible, or 'false' if no damage is visible.",
    images: [base64Image],
    stream: false
  }

  const reply = await fetch(process.env.OLLAMA_API_URL!, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(payload)
  })
  const data = await reply.json()
  console.log(`✨ API Route: Raw LLM Response (attempt ${attempt}):`, data)

  // Model-side failure: surface a friendly message and no damage verdict.
  if (data.error) {
    console.error('❌ API Route: LLM Error:', data.error)
    return {
      hasDamage: false,
      rawResponse: `Error: ${data.error}`,
      model: payload.model,
      error: "Failed to process image. The AI model encountered an error. Please try a different image format or try again later."
    }
  }

  // The prompt asks for a literal 'true'/'false'; any response containing
  // 'true' counts as damage, and a missing response counts as none.
  return {
    hasDamage: data.response?.toLowerCase().includes('true') ?? false,
    rawResponse: data.response || 'Error: No response',
    model: payload.model
  }
}

/**
 * POST /api/check-damage
 * Accepts a multipart form with an `image` file, base64-encodes it, and asks
 * the vision model whether the vehicle shows damage. Retries the model call
 * up to MAX_RETRIES times when the model reports an error.
 *
 * Responses: 200 with { hasDamage, rawResponse, model, error? };
 * 400 when no file is attached. Unexpected failures still return 200 with
 * hasDamage=false and an error message (preserved from the original design).
 */
export async function POST(request: Request) {
  try {
    console.log('🚀 API Route: Starting damage check process')

    const data = await request.formData()
    const file = data.get('image') as File

    if (!file) {
      console.error('❌ API Route: No image file provided')
      return NextResponse.json(
        { error: 'No image file provided' },
        { status: 400 }
      )
    }

    // Convert the file to base64 for the Ollama `images` field.
    const bytes = await file.arrayBuffer()
    const buffer = Buffer.from(bytes)
    const base64Image = buffer.toString('base64')

    // Try to check damage with retries.
    // BUG FIX: the original loop condition was `result.hasDamage === undefined`,
    // which is never true because tryCheckDamage always returns a boolean —
    // the retry path was dead code. Retry on the error flag instead.
    let attempt = 1
    let result = await tryCheckDamage(base64Image, attempt)

    while (result.error !== undefined && attempt < MAX_RETRIES) {
      attempt++
      console.log(`⚠️ API Route: Invalid response, retrying (attempt ${attempt})`)
      result = await tryCheckDamage(base64Image, attempt)
    }

    console.log('🎯 API Route: Damage check result:', result)
    return NextResponse.json({
      hasDamage: result.hasDamage,
      rawResponse: result.rawResponse,
      model: result.model,
      error: result.error
    })

  } catch (error) {
    console.error('💥 API Route: Error:', error)
    return NextResponse.json({
      hasDamage: false,
      rawResponse: 'Error occurred during damage check',
      model: "llava:7b",
      error: "An unexpected error occurred. Please try again later."
    })
  }
}
7 changes: 7 additions & 0 deletions app/api/config/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
import { NextResponse } from 'next/server'

/**
 * GET /api/config
 * Returns the configured Ollama endpoint URL as JSON.
 *
 * NOTE(review): this hands the server-side OLLAMA_API_URL (an internal
 * address per .env.local.example) to any unauthenticated caller — confirm
 * this endpoint is meant to be public.
 */
export async function GET() {
  return NextResponse.json({
    ollamaApiUrl: process.env.OLLAMA_API_URL
  })
}
Loading

0 comments on commit ce41698

Please sign in to comment.