-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathquickstart.sh
More file actions
executable file
· 135 lines (118 loc) · 4.06 KB
/
quickstart.sh
File metadata and controls
executable file
· 135 lines (118 loc) · 4.06 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
#!/bin/bash
# Quick Start Script for LLM Data Extractor (Ollama-powered)
# Helps you get started quickly with FREE local models:
#   1. Verifies Ollama is installed and its local server is reachable.
#   2. Pulls the default model if it is not present.
#   3. Creates a Python venv, installs requirements, scaffolds .env and
#      the output/logs directories.
#   4. Runs one example extraction and prints next steps.
set -euo pipefail  # exit on error, unset vars are errors, pipelines fail loudly

readonly MODEL="llama3.2"                           # default Ollama model
readonly OLLAMA_TAGS_URL="http://localhost:11434/api/tags"
readonly DIVIDER="=============================================="

#######################################
# Print an error message to stderr and abort the script.
# Arguments: $* - message text
#######################################
die() {
  printf 'ERROR: %s\n' "$*" >&2
  exit 1
}

#######################################
# Return 0 if the Ollama HTTP server answers on localhost.
#######################################
server_up() {
  curl -s "$OLLAMA_TAGS_URL" > /dev/null 2>&1
}

#######################################
# Verify the ollama binary is on PATH; exit with install hints otherwise.
#######################################
check_ollama_installed() {
  if ! command -v ollama > /dev/null 2>&1; then
    echo "[ERROR] Ollama is not installed!"
    echo ""
    echo "Please install Ollama first:"
    echo "  macOS/Linux:      curl -fsSL https://ollama.ai/install.sh | sh"
    echo "  macOS (Homebrew): brew install ollama"
    echo "  Or visit: https://ollama.ai"
    echo ""
    exit 1
  fi
  echo "[OK] Ollama is installed"
  echo ""
}

#######################################
# Ensure the Ollama server is running, starting it in the background
# if necessary. Exits if the server cannot be reached after starting.
#######################################
ensure_server_running() {
  if server_up; then
    echo "[OK] Ollama server is running"
  else
    echo "[WARN] Ollama server is not running!"
    echo ""
    echo "Starting Ollama in background..."
    ollama serve > /dev/null 2>&1 &
    sleep 3  # give the server a moment to bind its port
    if server_up; then
      echo "[OK] Ollama server started"
    else
      echo "[ERROR] Could not start Ollama server"
      echo "Please run 'ollama serve' in a separate terminal and try again"
      exit 1
    fi
  fi
  echo ""
}

#######################################
# Pull the default model if `ollama list` does not already show it.
#######################################
ensure_model_available() {
  if ollama list | grep -q "$MODEL"; then
    echo "[OK] $MODEL model is available"
  else
    echo "[WARN] $MODEL model not found"
    echo ""
    echo "Pulling $MODEL model (this may take a few minutes)..."
    ollama pull "$MODEL"
    echo "[OK] Model downloaded"
  fi
  echo ""
}

#######################################
# Create/reuse ./venv, activate it, and install requirements.txt.
#######################################
setup_python_env() {
  echo "Checking Python version..."
  command -v python3 > /dev/null 2>&1 || die "python3 not found on PATH"
  local python_version
  python_version=$(python3 --version 2>&1 | awk '{print $2}')
  echo "[OK] Found Python $python_version"
  echo ""

  if [ ! -d "venv" ]; then
    echo "Creating virtual environment..."
    python3 -m venv venv
    echo "[OK] Virtual environment created"
  else
    echo "[OK] Virtual environment already exists"
  fi
  echo ""

  echo "Activating virtual environment..."
  # shellcheck disable=SC1091 -- venv is created above, not known to the linter
  source venv/bin/activate
  echo "[OK] Virtual environment activated"
  echo ""

  echo "Installing dependencies..."
  pip install -q --upgrade pip
  pip install -q -r requirements.txt
  echo "[OK] Dependencies installed"
  echo ""
}

#######################################
# Scaffold .env (from .env.example) and the output/logs directories.
#######################################
setup_project_files() {
  if [ ! -f ".env" ]; then
    echo "Creating .env from template..."
    [ -f ".env.example" ] || die ".env.example template not found"
    cp .env.example .env
    echo "[OK] .env file created (using defaults)"
  else
    echo "[OK] .env file already exists"
  fi
  echo ""

  mkdir -p output logs
  echo "[OK] Created output and logs directories"
  echo ""
}

#######################################
# Run one sample invoice extraction through the CLI.
#######################################
run_example() {
  echo "$DIVIDER"
  echo "Running example extraction..."
  echo "$DIVIDER"
  echo ""
  python3 cli.py extract \
    --input sample_inputs/invoice_tech.txt \
    --type invoice \
    --output output/quickstart_invoice.json
  echo ""
}

#######################################
# Print the post-setup cheat sheet.
#######################################
print_next_steps() {
  echo "$DIVIDER"
  echo "Setup complete!"
  echo "$DIVIDER"
  echo ""
  echo "Next steps:"
  echo "  1. Check output/quickstart_invoice.json"
  echo "  2. View logs in logs/ directory"
  echo "  3. Try other examples:"
  echo ""
  echo "     python3 cli.py extract --input sample_inputs/email_project.txt --type email"
  echo "     python3 cli.py extract --input sample_inputs/support_ticket_urgent.txt --type support_ticket"
  echo ""
  echo "  4. See all options:"
  echo "     python3 cli.py extract --help"
  echo "     python3 cli.py list-schemas"
  echo ""
  echo "  5. Try different models:"
  echo "     ollama pull llama3.1"
  echo "     python3 cli.py extract --input sample_inputs/invoice_tech.txt --type invoice --model llama3.1"
  echo ""
  echo "Happy extracting with FREE local models!"
}

main() {
  echo "LLM Data Extractor - Quick Start (Ollama)"
  echo "$DIVIDER"
  echo ""
  check_ollama_installed
  ensure_server_running
  ensure_model_available
  setup_python_env
  setup_project_files
  run_example
  print_next_steps
}

main "$@"