# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

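# Docker Compose deployment of the SearchQnA pipeline: TEI embedding and
# reranking servers, a TGI text-generation server (AMD ROCm image), the
# embedding / web-retriever / reranking / LLM microservices, the SearchQnA
# backend megaservice, and its UI.
#
# The SEARCH_* variables referenced below (host IP, model IDs, service
# endpoints, ports, and the Hugging Face / Google API credentials) are assumed
# to be exported in the shell before running `docker compose up`, e.g. via a
# project-provided environment script.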
services:
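  # TEI server hosting the embedding model (SEARCH_EMBEDDING_MODEL_ID) on CPU;
  # exposed on host port 3001.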
  search-tei-embedding-service:
    image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
    container_name: search-tei-embedding-server
    ports:
      - "3001:80"
    volumes:
      - "./data:/data"
    shm_size: 1g
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      HF_TOKEN: ${SEARCH_HUGGINGFACEHUB_API_TOKEN}
      HUGGING_FACE_HUB_TOKEN: ${SEARCH_HUGGINGFACEHUB_API_TOKEN}
      HUGGINGFACEHUB_API_TOKEN: ${SEARCH_HUGGINGFACEHUB_API_TOKEN}
    command: --model-id ${SEARCH_EMBEDDING_MODEL_ID} --auto-truncate
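  # Embedding microservice that forwards requests to the TEI embedding server
  # via TEI_EMBEDDING_ENDPOINT; exposed on host port 3002.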
  search-embedding:
    image: ${REGISTRY:-opea}/embedding:${TAG:-latest}
    container_name: search-embedding-server
    depends_on:
      - search-tei-embedding-service
    ports:
      - "3002:6000"
    ipc: host
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      TEI_EMBEDDING_HOST_IP: ${SEARCH_HOST_IP}
      TEI_EMBEDDING_ENDPOINT: ${SEARCH_TEI_EMBEDDING_ENDPOINT}
      HF_TOKEN: ${SEARCH_HUGGINGFACEHUB_API_TOKEN}
    restart: unless-stopped
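  # Web retriever microservice; fetches search results through Google Custom
  # Search (GOOGLE_API_KEY / GOOGLE_CSE_ID) and embeds them via the TEI
  # embedding endpoint. Exposed on host port 3003.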
  search-web-retriever:
    image: ${REGISTRY:-opea}/web-retriever:${TAG:-latest}
    container_name: search-web-retriever-server
    ports:
      - "3003:7077"
    ipc: host
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      TEI_EMBEDDING_ENDPOINT: ${SEARCH_TEI_EMBEDDING_ENDPOINT}
      GOOGLE_API_KEY: ${SEARCH_GOOGLE_API_KEY}
      GOOGLE_CSE_ID: ${SEARCH_GOOGLE_CSE_ID}
    restart: unless-stopped
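  # TEI server hosting the reranking model (SEARCH_RERANK_MODEL_ID) on CPU;
  # exposed on host port 3004.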
  search-tei-reranking-service:
    image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
    container_name: search-tei-reranking-server
    ports:
      - "3004:80"
    volumes:
      - "./data:/data"
    shm_size: 1g
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
    command: --model-id ${SEARCH_RERANK_MODEL_ID} --auto-truncate
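  # Reranking microservice that calls the TEI reranking server via
  # TEI_RERANKING_ENDPOINT; exposed on host port 3005.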
  search-reranking:
    image: ${REGISTRY:-opea}/reranking:${TAG:-latest}
    container_name: search-reranking-server
    depends_on:
      - search-tei-reranking-service
    ports:
      - "3005:8000"
    ipc: host
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      TEI_RERANKING_ENDPOINT: ${SEARCH_TEI_RERANKING_ENDPOINT}
      HF_TOKEN: ${SEARCH_HUGGINGFACEHUB_API_TOKEN}
      HUGGING_FACE_HUB_TOKEN: ${SEARCH_HUGGINGFACEHUB_API_TOKEN}
      HUGGINGFACEHUB_API_TOKEN: ${SEARCH_HUGGINGFACEHUB_API_TOKEN}
    restart: unless-stopped
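  # TGI text-generation server using the ROCm image; requires AMD GPU access
  # through /dev/kfd and /dev/dri and membership in the video group. Exposed on
  # host port 3006.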
  search-tgi-service:
    image: ghcr.io/huggingface/text-generation-inference:2.3.1-rocm
    container_name: search-tgi-service
    ports:
      - "3006:80"
    volumes:
      - "./data:/data"
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      HUGGING_FACE_HUB_TOKEN: ${SEARCH_HUGGINGFACEHUB_API_TOKEN}
      HUGGINGFACEHUB_API_TOKEN: ${SEARCH_HUGGINGFACEHUB_API_TOKEN}
    shm_size: 1g
    devices:
      - /dev/kfd:/dev/kfd
      - /dev/dri/:/dev/dri/
    cap_add:
      - SYS_PTRACE
    group_add:
      - video
    security_opt:
      - seccomp:unconfined
    ipc: host
    command: --model-id ${SEARCH_LLM_MODEL_ID} --max-input-length 1024 --max-total-tokens 2048
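  # LLM microservice that wraps the TGI endpoint (SEARCH_TGI_LLM_ENDPOINT) and
  # serves generation requests on host port 3007.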
  search-llm:
    image: ${REGISTRY:-opea}/llm-textgen:${TAG:-latest}
    container_name: search-llm-server
    depends_on:
      - search-tgi-service
    ports:
      - "3007:9000"
    ipc: host
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      TGI_LLM_ENDPOINT: ${SEARCH_TGI_LLM_ENDPOINT}
      HUGGINGFACEHUB_API_TOKEN: ${SEARCH_HUGGINGFACEHUB_API_TOKEN}
      LLM_ENDPOINT: ${SEARCH_TGI_LLM_ENDPOINT}
      LLM_MODEL_ID: ${SEARCH_LLM_MODEL_ID}
      LLM_MODEL: ${SEARCH_LLM_MODEL_ID}
      HF_TOKEN: ${SEARCH_HUGGINGFACEHUB_API_TOKEN}
      OPENAI_API_KEY: ${SEARCH_OPENAI_API_KEY}
    restart: unless-stopped
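  # SearchQnA backend megaservice that orchestrates embedding, web retrieval,
  # reranking, and LLM generation; exposed on SEARCH_BACKEND_SERVICE_PORT
  # (default 3008).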
  search-backend-server:
    image: ${REGISTRY:-opea}/searchqna:${TAG:-latest}
    container_name: search-backend-server
    depends_on:
      - search-tei-embedding-service
      - search-embedding
      - search-web-retriever
      - search-tei-reranking-service
      - search-reranking
      - search-tgi-service
      - search-llm
    ports:
      - "${SEARCH_BACKEND_SERVICE_PORT:-3008}:8888"
    environment:
      - no_proxy=${no_proxy}
      - https_proxy=${https_proxy}
      - http_proxy=${http_proxy}
      - MEGA_SERVICE_HOST_IP=${SEARCH_MEGA_SERVICE_HOST_IP}
      - EMBEDDING_SERVICE_HOST_IP=${SEARCH_EMBEDDING_SERVICE_HOST_IP}
      - WEB_RETRIEVER_SERVICE_HOST_IP=${SEARCH_WEB_RETRIEVER_SERVICE_HOST_IP}
      - RERANK_SERVICE_HOST_IP=${SEARCH_RERANK_SERVICE_HOST_IP}
      - LLM_SERVICE_HOST_IP=${SEARCH_LLM_SERVICE_HOST_IP}
      - EMBEDDING_SERVICE_PORT=${SEARCH_EMBEDDING_SERVICE_PORT}
      - WEB_RETRIEVER_SERVICE_PORT=${SEARCH_WEB_RETRIEVER_SERVICE_PORT}
      - RERANK_SERVICE_PORT=${SEARCH_RERANK_SERVICE_PORT}
      - LLM_SERVICE_PORT=${SEARCH_LLM_SERVICE_PORT}
    ipc: host
    restart: always
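  # Web UI that reaches the backend through BACKEND_BASE_URL; exposed on
  # SEARCH_FRONTEND_SERVICE_PORT (default 5173).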
  search-ui-server:
    image: ${REGISTRY:-opea}/searchqna-ui:${TAG:-latest}
    container_name: search-ui-server
    depends_on:
      - search-backend-server
    ports:
      - "${SEARCH_FRONTEND_SERVICE_PORT:-5173}:5173"
    environment:
      - no_proxy=${no_proxy}
      - https_proxy=${https_proxy}
      - http_proxy=${http_proxy}
      - BACKEND_BASE_URL=${SEARCH_BACKEND_SERVICE_ENDPOINT}
    ipc: host
    restart: always

networks:
  default:
    driver: bridge
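
# A minimal usage sketch, assuming the SEARCH_* variables are already exported
# in the current shell and this file is saved as compose.yaml:
#
#   docker compose -f compose.yaml up -d
#
#   # The request path below follows the SearchQnA gateway convention and is an
#   # assumption; adjust it to match the deployed backend.
#   curl http://localhost:${SEARCH_BACKEND_SERVICE_PORT:-3008}/v1/searchqna \
#     -H "Content-Type: application/json" \
#     -d '{"messages": "What is the latest news about AI?"}'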