-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: setup.sh
More file actions
executable file
·160 lines (132 loc) · 4.67 KB
/
setup.sh
File metadata and controls
executable file
·160 lines (132 loc) · 4.67 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
#!/bin/bash
# @Author: ArthurBernard
# @Email: arthur.bernard.92@gmail.com
# @Date: 2024-11-20 10:56:00
# @Last modified by: ArthurBernard
# @Last modified time: 2024-11-22 10:15:37
#
# Setup script: installs system packages, a Python virtual environment,
# PyTorch + llama-cpp-python (CPU or CUDA wheels), project repositories,
# and configures Nginx with a TLS certificate.
#
# Usage: ./setup.sh [--cpu-only]
#   --cpu-only  Force CPU wheels even when an NVIDIA GPU is present.

# Stop on errors
set -e

# Colors for output
GREEN='\033[0;32m'
RED='\033[0;31m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Default to GPU installation
CPU_ONLY=false

# Parse command-line arguments
while [[ "$#" -gt 0 ]]; do
    case $1 in
        --cpu-only) CPU_ONLY=true ;;
        # Fix: diagnostics belong on stderr, not stdout.
        *) echo -e "${RED}Unknown parameter: $1${NC}" >&2 && exit 1 ;;
    esac
    shift
done
# Install the CPU-only builds of PyTorch and llama-cpp-python.
# Fix: this command pair was duplicated in two branches below; the
# duplication is hoisted into one private helper.
_install_cpu_stack() {
    pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu
    pip install llama-cpp-python
}

# Install PyTorch and llama-cpp-python, choosing CPU or CUDA wheels.
# Globals (read): CPU_ONLY, YELLOW, RED, NC
# Falls back to CPU wheels when nvidia-smi is not on PATH (no driver).
install_pytorch_llamacpp() {
    if [ "$CPU_ONLY" = true ]; then
        echo -e "${YELLOW}Installing PyTorch and llama-cpp-python for CPU...${NC}"
        _install_cpu_stack
    else
        if command -v nvidia-smi &>/dev/null; then
            echo -e "${YELLOW}Installing PyTorch with GPU support...${NC}"
            pip install torch torchvision torchaudio
            # Prebuilt CUDA 12.4 wheels published by the llama-cpp-python author.
            pip install llama-cpp-python --extra-index-url https://abetlen.github.io/llama-cpp-python/whl/cu124
        else
            # Warning, not fatal: proceed with CPU wheels. Fix: warn on stderr.
            echo -e "${RED}GPU not detected. Falling back to CPU installation.${NC}" >&2
            _install_cpu_stack
        fi
    fi
}
# Clone a git repository, or pull the latest changes if already present.
# Arguments:
#   $1 - repository URL
#   $2 - target directory name
# Globals (read): GREEN, YELLOW, NC
check_or_clone_repo() {
    # Fix: local variables so the helper does not clobber globals.
    local repo_url=$1
    local repo_dir=$2

    if [ -d "$repo_dir" ]; then
        echo -e "${GREEN}Repository $repo_dir already exists.${NC}"
        echo -e "${YELLOW}Pulling latest changes...${NC}"
        # Fix: 'git -C' replaces 'cd "$dir" && git pull && cd ..', which
        # under 'set -e' could abort while leaving the shell inside $dir.
        git -C "$repo_dir" pull
    else
        echo -e "${YELLOW}Cloning repository $repo_url...${NC}"
        # Explicit target keeps the destination independent of URL parsing.
        git clone "$repo_url" "$repo_dir"
    fi
}
# Create a directory (including parents) unless it already exists.
# Arguments:
#   $1 - path of the directory to create
# Globals (read): GREEN, YELLOW, NC
check_or_create_dir() {
    # Fix: local variable instead of clobbering a global DIR.
    local dir=$1

    if [ -d "$dir" ]; then
        echo -e "${GREEN}Directory $dir already exists. Skipping creation.${NC}"
    else
        echo -e "${YELLOW}Creating directory $dir...${NC}"
        mkdir -p "$dir"
    fi
}
# ---- Main setup sequence -------------------------------------------------

# Create conversation-history and log directories.
mkdir -p "./_conv_history"
LOG_DIR="./src/logs"
if [ ! -d "$LOG_DIR" ]; then
    echo -e "${YELLOW}Directory $LOG_DIR does not exist. Creating it...${NC}"
    mkdir -p "$LOG_DIR"
else
    echo -e "${GREEN}Directory $LOG_DIR already exists.${NC}"
fi

# Update system
echo -e "${YELLOW}Updating system...${NC}"
sudo apt update
sudo apt upgrade -y

# Install basic dependencies
echo -e "${YELLOW}Installing system dependencies...${NC}"
sudo apt install -y python3 python3-pip python3-venv git nginx certbot python3-certbot-nginx

# Setup Python virtual environment
echo -e "${YELLOW}Setting up Python virtual environment...${NC}"
python3 -m venv ~/venv
# shellcheck disable=SC1090 — venv path is fixed above.
source ~/venv/bin/activate

# Install PyTorch and LLaMa C++
install_pytorch_llamacpp

# Clone repositories
echo -e "${YELLOW}Cloning necessary repositories...${NC}"
check_or_clone_repo "https://github.com/LLM-Solution/PyLLMSol.git" "PyLLMSol"
check_or_clone_repo "https://github.com/ggerganov/llama.cpp.git" "llama.cpp"

# Install PyLLMSol in editable mode.
echo -e "${YELLOW}Installing PyLLMSol and llama-cpp-python...${NC}"
pip install -e PyLLMSol/.

# Install requirements
echo -e "${YELLOW}Installing Python dependencies...${NC}"
pip install -r requirements.txt

# Create logs directory.
# Fix: tilde does NOT expand inside quotes — the original
# check_or_create_dir "~/MiniChatBot/src/logs" created a literal './~'
# directory. Use $HOME instead.
check_or_create_dir "$HOME/MiniChatBot/src/logs"

# Install Gunicorn
echo -e "${YELLOW}Installing Gunicorn...${NC}"
pip install gunicorn

# Prompt user for hostname.
# Fix: -r keeps backslashes literal; API_HOSTNAME avoids shadowing the
# shell's built-in HOSTNAME variable.
read -r -p "Enter your API hostname (e.g., api.example.com): " API_HOSTNAME
if [ -z "$API_HOSTNAME" ]; then
    echo -e "${RED}Hostname cannot be empty. Exiting.${NC}" >&2
    exit 1
fi
# Fix: the hostname is untrusted input interpolated into a sed replacement
# and an Nginx config — reject anything outside plain hostname characters.
if ! [[ "$API_HOSTNAME" =~ ^[A-Za-z0-9.-]+$ ]]; then
    echo -e "${RED}Invalid hostname: $API_HOSTNAME${NC}" >&2
    exit 1
fi

# Define the configuration file name based on the hostname
CONFIG_FILE="/etc/nginx/sites-available/$API_HOSTNAME"

# Replace placeholder in Nginx configuration.
# Fix: render into a mktemp file instead of a file named after user input
# in the current directory.
echo -e "${YELLOW}Configuring Nginx with hostname: ${API_HOSTNAME}${NC}"
TMP_CONF=$(mktemp)
sed "s/server_name api.example.com;/server_name $API_HOSTNAME;/" api.example.com.template > "$TMP_CONF"

# Copy and enable the Nginx configuration
echo -e "${YELLOW}Copying configuration to Nginx directory...${NC}"
sudo mv "$TMP_CONF" "$CONFIG_FILE"
# mktemp creates files 0600; Nginx config should be world-readable.
sudo chmod 644 "$CONFIG_FILE"
sudo ln -sf "$CONFIG_FILE" /etc/nginx/sites-enabled/

# Test and restart Nginx
if ! sudo nginx -t; then
    echo -e "${RED}Nginx configuration test failed. Check your configuration.${NC}" >&2
    exit 1
fi
sudo systemctl restart nginx

# Add SSL certificate via Let's Encrypt.
sudo certbot --nginx -d "${API_HOSTNAME}"

# Final message
echo -e "${GREEN}Setup complete.${NC}"
echo -e "${BLUE}Please copy model weights and start the server as described in the documentation.${NC}"
if ! $CPU_ONLY && ! command -v nvidia-smi &>/dev/null; then
    echo -e "${YELLOW}If you plan to use GPU acceleration, please install GPU drivers and reboot your system.${NC}"
    echo -e "${YELLOW}Refer to the README for detailed instructions.${NC}"
fi