mirror of
https://github.com/PaddlePaddle/FastDeploy.git
synced 2026-04-22 16:07:51 +08:00
[Feature] Enhance Router with /v1/completions, docs, scripts, and version info (#5966)
* [Doc] Update prerequisites in the documentation * [Feature] Enhance Router with /v1/completions, docs, scripts, and version info * [Feature] Enhance Router with /v1/completions, docs, scripts, and version info --------- Co-authored-by: mouxin <mouxin@baidu.com>
This commit is contained in:
@@ -0,0 +1,109 @@
|
||||
#!/bin/bash
set -e

# Test mixed servers + router, with the router running from a prebuilt
# fd-router binary that is downloaded and checksum-verified if missing.
# Requires: utils.sh (check_ports, wait_for_health) in the current directory.

# ---- prepare environment ----
export MODEL_NAME="PaddlePaddle/ERNIE-4.5-0.3B-Paddle"
export FD_DEBUG=1

# Proxies would intercept the localhost health/registration traffic.
unset http_proxy https_proxy
rm -rf log_*
source ./utils.sh

S1_PORT=52400
S2_PORT=52500
ROUTER_PORT=52600

FD_BIN_DIR="/usr/local/bin"
FD_ROUTER_BIN="${FD_BIN_DIR}/fd-router"
FD_ROUTER_URL="https://paddle-qa.bj.bcebos.com/FastDeploy/fd-router"
FD_ROUTER_SHA256="67640aaeebdd886826d3534930b2154cd2c1441a26bc3f38c3af5f0aadba7c2d"

# Each api_server needs 4 consecutive ports: serving, metrics,
# engine-worker-queue and cache-queue.
ports=(
    "$S1_PORT" $((S1_PORT + 1)) $((S1_PORT + 2)) $((S1_PORT + 3))
    "$S2_PORT" $((S2_PORT + 1)) $((S2_PORT + 2)) $((S2_PORT + 3))
    "$ROUTER_PORT"
)
check_ports "${ports[@]}" || {
    echo "❌ Some ports are in use. Please release them."
    exit 1
}

# ---- check fd-router binary ----
if [ ! -x "${FD_ROUTER_BIN}" ]; then
    echo "⚠️ fd-router not found, downloading..."

    mkdir -p "${FD_BIN_DIR}"
    # Download to a temp name so a partial fetch never shadows a real binary.
    TMP_BIN="${FD_ROUTER_BIN}.tmp"

    wget -q --no-proxy "${FD_ROUTER_URL}" -O "${TMP_BIN}" || exit 1

    # Refuse to install a binary whose checksum does not match.
    # (two-space separator is the canonical sha256sum -c line format)
    echo "${FD_ROUTER_SHA256}  ${TMP_BIN}" | sha256sum -c - || {
        echo "❌ Integrity check failed"
        rm -f "${TMP_BIN}"
        exit 1
    }

    mv "${TMP_BIN}" "${FD_ROUTER_BIN}"
    chmod +x "${FD_ROUTER_BIN}"

    echo "fd-router installed and verified"
else
    echo "fd-router already exists"
fi

# ---- start router ----
export FD_LOG_DIR="log_router"
mkdir -p "${FD_LOG_DIR}"

# Use the FD_ROUTER_BIN variable (the original hard-coded the path again).
# NOTE: ">file 2>&1" (not "2>&1 >file") so stderr lands in the log too.
nohup "${FD_ROUTER_BIN}" \
    --port "${ROUTER_PORT}" \
    >"${FD_LOG_DIR}/nohup" 2>&1 &

# ---- start model server 0 ----
export CUDA_VISIBLE_DEVICES=0
export FD_LOG_DIR="log_server_0"
mkdir -p "${FD_LOG_DIR}"

nohup python -m fastdeploy.entrypoints.openai.api_server \
    --model "${MODEL_NAME}" \
    --port "${S1_PORT}" \
    --metrics-port $((S1_PORT + 1)) \
    --engine-worker-queue-port $((S1_PORT + 2)) \
    --cache-queue-port $((S1_PORT + 3)) \
    --max-model-len 32768 \
    --router "0.0.0.0:${ROUTER_PORT}" \
    >"${FD_LOG_DIR}/nohup" 2>&1 &

wait_for_health "${S1_PORT}"

# ---- start model server 1 ----
export CUDA_VISIBLE_DEVICES=1
export FD_LOG_DIR="log_server_1"
mkdir -p "${FD_LOG_DIR}"

nohup python -m fastdeploy.entrypoints.openai.api_server \
    --model "${MODEL_NAME}" \
    --port "${S2_PORT}" \
    --metrics-port $((S2_PORT + 1)) \
    --engine-worker-queue-port $((S2_PORT + 2)) \
    --cache-queue-port $((S2_PORT + 3)) \
    --max-model-len 32768 \
    --router "0.0.0.0:${ROUTER_PORT}" \
    >"${FD_LOG_DIR}/nohup" 2>&1 &

wait_for_health "${S2_PORT}"

# ---- send request ----
sleep 10 # make sure servers are registered to the router
echo "send request..."
curl -X POST "http://0.0.0.0:${ROUTER_PORT}/v1/chat/completions" \
    -H "Content-Type: application/json" \
    -d '{
        "messages": [
            {"role": "user", "content": "hello"}
        ],
        "max_tokens": 20,
        "stream": false
    }'
|
||||
@@ -0,0 +1,81 @@
|
||||
#!/bin/bash
set -e

# Test mixed servers + router, with the router launched from the python
# module (fastdeploy.router.launch) instead of a prebuilt binary.
# Requires: utils.sh (check_ports, wait_for_health) in the current directory.

# ---- prepare environment ----
export MODEL_NAME="PaddlePaddle/ERNIE-4.5-0.3B-Paddle"
export FD_DEBUG=1

# Proxies would intercept the localhost health/registration traffic.
unset http_proxy https_proxy
rm -rf log_*
source ./utils.sh

S1_PORT=52400
S2_PORT=52500
ROUTER_PORT=52600

# Each api_server needs 4 consecutive ports: serving, metrics,
# engine-worker-queue and cache-queue.
ports=(
    "$S1_PORT" $((S1_PORT + 1)) $((S1_PORT + 2)) $((S1_PORT + 3))
    "$S2_PORT" $((S2_PORT + 1)) $((S2_PORT + 2)) $((S2_PORT + 3))
    "$ROUTER_PORT"
)
check_ports "${ports[@]}" || {
    echo "❌ Some ports are in use. Please release them."
    exit 1
}

# ---- start router ----
export FD_LOG_DIR="log_router"
mkdir -p "${FD_LOG_DIR}"

# NOTE: ">file 2>&1" (not "2>&1 >file") so stderr lands in the log too.
nohup python -m fastdeploy.router.launch \
    --port "${ROUTER_PORT}" \
    >"${FD_LOG_DIR}/nohup" 2>&1 &

# ---- start model server 0 ----
export CUDA_VISIBLE_DEVICES=0
export FD_LOG_DIR="log_server_0"
mkdir -p "${FD_LOG_DIR}"

nohup python -m fastdeploy.entrypoints.openai.api_server \
    --model "${MODEL_NAME}" \
    --port "${S1_PORT}" \
    --metrics-port $((S1_PORT + 1)) \
    --engine-worker-queue-port $((S1_PORT + 2)) \
    --cache-queue-port $((S1_PORT + 3)) \
    --max-model-len 32768 \
    --router "0.0.0.0:${ROUTER_PORT}" \
    >"${FD_LOG_DIR}/nohup" 2>&1 &

wait_for_health "${S1_PORT}"

# ---- start model server 1 ----
export CUDA_VISIBLE_DEVICES=1
export FD_LOG_DIR="log_server_1"
mkdir -p "${FD_LOG_DIR}"

nohup python -m fastdeploy.entrypoints.openai.api_server \
    --model "${MODEL_NAME}" \
    --port "${S2_PORT}" \
    --metrics-port $((S2_PORT + 1)) \
    --engine-worker-queue-port $((S2_PORT + 2)) \
    --cache-queue-port $((S2_PORT + 3)) \
    --max-model-len 32768 \
    --router "0.0.0.0:${ROUTER_PORT}" \
    >"${FD_LOG_DIR}/nohup" 2>&1 &

wait_for_health "${S2_PORT}"

# ---- send request ----
sleep 10 # make sure servers are registered to the router
echo "send request..."
curl -X POST "http://0.0.0.0:${ROUTER_PORT}/v1/chat/completions" \
    -H "Content-Type: application/json" \
    -d '{
        "messages": [
            {"role": "user", "content": "hello"}
        ],
        "max_tokens": 20,
        "stream": false
    }'
|
||||
@@ -0,0 +1,73 @@
|
||||
#!/bin/bash
set -e

# Test splitwise (prefill/decode disaggregation): one router in splitwise
# mode plus a prefill server and a decode server registered to it.
# Requires: utils.sh (check_ports, wait_for_health) next to this script.

# ---- prepare environment ----
export MODEL_NAME="PaddlePaddle/ERNIE-4.5-0.3B-Paddle"
export FD_DEBUG=1

# Resolve utils.sh relative to this script so it works from any cwd.
SCRIPT_PATH=$(readlink -f "$0")
SCRIPT_DIR=$(dirname "$SCRIPT_PATH")
source "${SCRIPT_DIR}/utils.sh"

# Proxies would intercept the localhost health/registration traffic.
unset http_proxy https_proxy

P_PORT=52400
D_PORT=52500
ROUTER_PORT=52700
LOG_DATE=$(date +%Y%m%d_%H%M%S)

ports=("$P_PORT" "$D_PORT" "$ROUTER_PORT")
check_ports "${ports[@]}" || {
    echo "❌ Some ports are in use. Please release them."
    exit 1
}

# ---- start router ----
export FD_LOG_DIR="log/$LOG_DATE/router"
rm -rf "${FD_LOG_DIR}" && mkdir -p "${FD_LOG_DIR}"

# NOTE: ">file 2>&1" (not "2>&1 >file") so stderr lands in the log too.
nohup python -m fastdeploy.router.launch \
    --port "${ROUTER_PORT}" \
    --splitwise \
    >"${FD_LOG_DIR}/nohup" 2>&1 &

# ---- start prefill ----
export CUDA_VISIBLE_DEVICES=0
export FD_LOG_DIR="log/$LOG_DATE/prefill"
rm -rf "${FD_LOG_DIR}" && mkdir -p "${FD_LOG_DIR}"

nohup python -m fastdeploy.entrypoints.openai.api_server \
    --model "${MODEL_NAME}" \
    --port "${P_PORT}" \
    --splitwise-role "prefill" \
    --router "0.0.0.0:${ROUTER_PORT}" \
    >"${FD_LOG_DIR}/nohup" 2>&1 &

wait_for_health "${P_PORT}"

# ---- start decode ----
export CUDA_VISIBLE_DEVICES=1
export FD_LOG_DIR="log/$LOG_DATE/decode"
rm -rf "${FD_LOG_DIR}" && mkdir -p "${FD_LOG_DIR}"

nohup python -m fastdeploy.entrypoints.openai.api_server \
    --model "${MODEL_NAME}" \
    --port "${D_PORT}" \
    --splitwise-role "decode" \
    --router "0.0.0.0:${ROUTER_PORT}" \
    >"${FD_LOG_DIR}/nohup" 2>&1 &

wait_for_health "${D_PORT}"

# ---- send request ----
sleep 10 # make sure servers are registered to the router
echo "send request..."
curl -X POST "http://0.0.0.0:${ROUTER_PORT}/v1/chat/completions" \
    -H "Content-Type: application/json" \
    -d '{
        "messages": [
            {"role": "user", "content": "hello"}
        ],
        "max_tokens": 100,
        "stream": false
    }'
|
||||
@@ -0,0 +1,99 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Return 0 when no listening TCP/UDP socket is bound to the given port,
# 1 when the port is occupied.
# Arguments: $1 - port number to probe
is_port_free() {
    local candidate=$1
    # ss -ltun prints listening tcp/udp sockets, numeric; column 4 holds
    # the local address, so match an exact ":port" suffix.
    ss -ltun | awk '{print $4}' | grep -q ":${candidate}$" && return 1
    return 0
}
|
||||
|
||||
# Succeed (return 0) only when every port passed as an argument is free;
# print the first occupied port and return 1 otherwise.
# Arguments: one or more port numbers
check_ports() {
    local candidate
    for candidate in "$@"; do
        is_port_free "$candidate" && continue
        echo "❌ Port $candidate is already in use"
        return 1
    done
    return 0
}
|
||||
|
||||
# Block until every HTTP server in a comma-separated port list answers
# GET /health with status 200, drawing a live, in-place status display
# (one colored line per port plus a summary line) on the terminal.
# Arguments: $1 - port list, e.g. "8000" or "8000,8001"
# Outputs:   progress display on stdout; returns once all ports are healthy
# NOTE(review): no timeout — loops forever if a server never comes up.
wait_for_health() {
    IFS=',' read -r -a server_ports <<< "$1"
    local num_ports=${#server_ports[@]}
    # lines drawn per iteration (one per port + summary), used to rewind the cursor
    local total_lines=$((num_ports + 1))
    local first_run=true  # NOTE(review): never read — candidate for removal
    local GREEN='\033[0;32m'
    local RED='\033[0;31m'
    local NC='\033[0m' # No Color
    local start_time=$(date +%s)

    echo "-------- WAIT FOR HEALTH --------"
    while true; do
        local all_ready=true
        for port in "${server_ports[@]}"; do
            # "000" stands in for "no response" when curl itself fails
            status_code=$(curl -s --max-time 1 -o /dev/null -w "%{http_code}" "http://0.0.0.0:${port}/health" || echo "000")
            if [ "$status_code" -eq 200 ]; then
                # \033[K clears to end-of-line so shorter text overwrites cleanly
                printf "Port %s: ${GREEN}[OK] 200${NC}\033[K\n" "$port"
            else
                all_ready=false
                printf "Port %s: ${RED}[WAIT] %s${NC}\033[K\n" "$port" "$status_code"
            fi
        done
        cur_time=$(date +%s)
        if [ "$all_ready" = "true" ]; then
            echo "All services are ready! [$((cur_time-start_time))s]"
            break
        else
            echo "Services not ready.. [$((cur_time-start_time))s]"
            # move the cursor up over the lines just drawn so the next
            # iteration repaints them in place
            printf "\033[%dA" "$total_lines" # roll back cursor
            sleep 1
        fi
    done
    echo "---------------------------------"
}
|
||||
|
||||
# Find N free TCP/UDP ports inside [start_port, end_port) and print them
# comma-separated on stdout.
# Globals:   POD_IP (read, optional) - extra local address matched in netstat
# Arguments: $1 - number of ports wanted (default 1, must be > 0)
#            $2 - start of scan range (default 8000)
#            $3 - end of scan range   (default 9000)
# Outputs:   comma-joined port list on stdout
# Returns:   0 on success; 1 on bad arguments or when the range is exhausted
get_free_ports() {
    local free_ports_num=${1:-1}
    local start_port=${2:-8000}
    local end_port=${3:-9000}

    local free_ports=()
    if [[ -z ${free_ports_num} || "${free_ports_num}" -le 0 ]]; then
        log_warn "param can't be empty, and should > 0"
        echo "${free_ports[@]}"
        return 1
    fi

    # Collect every locally bound port: netstat column 4 is the local
    # address, column 5 the remote one; both can hold local ports.
    local used_ports1 used_ports2 all_used_ports
    used_ports1=$(netstat -an | grep -E "(0.0.0.0|127.0.0.1|${POD_IP}|tcp6)" | awk '{n=split($4,a,":"); if(a[n]~/^[0-9]+$/) print a[n];}' | sort -u)
    used_ports2=$(netstat -an | grep -E "(0.0.0.0|127.0.0.1|${POD_IP}|tcp6)" | awk '{n=split($5,a,":"); if(a[n]~/^[0-9]+$/) print a[n];}' | sort -u)
    all_used_ports=$(printf "%s\n" "${used_ports1}" "${used_ports2}" | sort -u)

    # Start scanning from a random offset so concurrent callers tend to
    # probe different parts of the range.
    local port=$(( RANDOM % (end_port - start_port + 1) + start_port ))

    # Bound the scan by the range size: the original looped forever when
    # every port in the range was taken.
    local range_size=$(( end_port - start_port + 1 ))
    local scanned=0
    while (( scanned < range_size )); do
        (( port++ ))
        scanned=$((scanned + 1))
        if [[ ${port} -ge ${end_port} ]]; then
            port=${start_port}
        fi

        # Exact-line match; the original substring test
        # ('[[ "$list" =~ "$port" ]]') wrongly treated e.g. port 80 as
        # used whenever 8080 was bound.
        if grep -qx "${port}" <<< "${all_used_ports}"; then
            continue
        fi

        if is_port_free "${port}"; then
            free_ports+=("${port}")
            (( free_ports_num-- ))
            if [[ ${free_ports_num} -eq 0 ]]; then
                break
            fi
        fi
    done

    if (( free_ports_num > 0 )); then
        echo "⚠️ not enough free ports in range ${start_port}-${end_port}" >&2
        return 1
    fi

    # Join with commas in a subshell so the caller's IFS is untouched
    # (the original "IFS=',' && echo" leaked the IFS change into the
    # sourcing shell).
    (IFS=','; echo "${free_ports[*]}")
    return 0
}
|
||||
Reference in New Issue
Block a user