
Commit 856fde6

Merge branch 'main' into yogesh/docker
Signed-off-by: Yogesh Pandey <yogesh.pandey@intel.com>
2 parents df7543d + 5c1d669

19 files changed: +203 -248 lines

.github/workflows/_image-build.yml

Lines changed: 5 additions & 0 deletions

@@ -34,6 +34,10 @@ jobs:
       image_repo: ${{ steps.build-megaservice-image.outputs.image_repo }}
       image_tag: ${{ steps.build-megaservice-image.outputs.image_tag }}
     steps:
+      - name: Clean up Working Directory
+        run: |
+          sudo rm -rf ${{github.workspace}}/* || true
+
       - name: Get checkout ref
         run: |
           if [ "${{ github.event_name }}" == "pull_request" ] || [ "${{ github.event_name }}" == "pull_request_target" ]; then
@@ -62,3 +66,4 @@ jobs:
           fi
           echo "IMAGE_TAG=${IMAGE_TAG}"
           echo "image_tag=$IMAGE_TAG" >> $GITHUB_OUTPUT
+          echo "image_repo=${IMAGE_REPO}" >> $GITHUB_OUTPUT

.github/workflows/_run-docker-compose.yml (new file)

Lines changed: 111 additions & 0 deletions

@@ -0,0 +1,111 @@
+# Copyright (C) 2024 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+
+name: Image Build
+permissions: read-all
+on:
+  workflow_call:
+    inputs:
+      registry:
+        description: Container Registry URL
+        required: false
+        default: ""
+        type: string
+      tag:
+        description: Container Tag
+        required: false
+        default: "latest"
+        type: string
+      example:
+        description: Example to test
+        required: true
+        type: string
+      hardware:
+        description: Hardware to run the test on
+        required: true
+        type: string
+jobs:
+  get-test-case:
+    runs-on: ubuntu-latest
+    outputs:
+      test_cases: ${{ steps.test-case-matrix.outputs.test_cases }}
+      CHECKOUT_REF: ${{ steps.get-checkout-ref.outputs.CHECKOUT_REF }}
+    steps:
+      - name: Get checkout ref
+        id: get-checkout-ref
+        run: |
+          if [ "${{ github.event_name }}" == "pull_request" ] || [ "${{ github.event_name }}" == "pull_request_target" ]; then
+            CHECKOUT_REF=refs/pull/${{ github.event.number }}/merge
+          else
+            CHECKOUT_REF=${{ github.ref }}
+          fi
+          echo "CHECKOUT_REF=${CHECKOUT_REF}" >> $GITHUB_OUTPUT
+          echo "checkout ref ${CHECKOUT_REF}"
+
+      - name: Checkout out Repo
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ steps.get-checkout-ref.outputs.CHECKOUT_REF }}
+          fetch-depth: 0
+
+      - name: Get test matrix
+        shell: bash
+        id: test-case-matrix
+        run: |
+          set -x
+          example_l=$(echo ${{ inputs.example }} | tr '[:upper:]' '[:lower:]')
+          cd ${{ github.workspace }}/${{ inputs.example }}/tests
+          test_cases=$(find . -type f -name "test_${example_l}*on_${{ inputs.hardware }}.sh" -print | cut -d/ -f2 | jq -R '.' | jq -sc '.')
+          echo "test_cases=$test_cases" >> $GITHUB_OUTPUT
+
+  run-test:
+    needs: [get-test-case]
+    strategy:
+      matrix:
+        test_case: ${{ fromJSON(needs.get-test-case.outputs.test_cases) }}
+    runs-on: ${{ inputs.hardware }}
+    continue-on-error: true
+    steps:
+      - name: Clean up Working Directory
+        run: |
+          sudo rm -rf ${{github.workspace}}/* || true
+          docker system prune -f
+          docker rmi $(docker images --filter reference="*/*/*:latest" -q) || true
+
+      - name: Checkout out Repo
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ needs.get-test-case.outputs.CHECKOUT_REF }}
+          fetch-depth: 0
+
+      - name: Run test
+        shell: bash
+        env:
+          HUGGINGFACEHUB_API_TOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }}
+          GOOGLE_CSE_ID: ${{ secrets.GOOGLE_CSE_ID }}
+          GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }}
+          IMAGE_REPO: ${{ inputs.registry }}
+          IMAGE_TAG: ${{ inputs.tag }}
+          example: ${{ inputs.example }}
+          hardware: ${{ inputs.hardware }}
+          test_case: ${{ matrix.test_case }}
+        run: |
+          cd ${{ github.workspace }}/$example/tests
+          export IMAGE_REPO=${OPEA_IMAGE_REPO}
+          if [ -f ${test_case} ]; then timeout 30m bash ${test_case}; else echo "Test script {${test_case}} not found, skip test!"; fi
+
+      - name: Clean up container
+        shell: bash
+        if: cancelled() || failure()
+        run: |
+          cd ${{ github.workspace }}/${{ inputs.example }}/docker/${{ inputs.hardware }}
+          docker compose stop && docker compose rm -f
+          docker system prune -f
+          docker rmi $(docker images --filter reference="*:5000/*/*" -q) || true
+
+      - name: Publish pipeline artifact
+        if: ${{ !cancelled() }}
+        uses: actions/upload-artifact@v4
+        with:
+          name: ${{ matrix.test_case }}
+          path: ${{ github.workspace }}/${{ inputs.example }}/tests/*.log

.github/workflows/pr-docker-compose-e2e.yml

Lines changed: 8 additions & 51 deletions

@@ -13,8 +13,7 @@ on:
       - "**/ui/**"
       - "!**.md"
       - "!**.txt"
-      - .github/workflows/docker-compose-e2e.yml
-  workflow_dispatch:
+      - .github/workflows/pr-docker-compose-e2e.yml

 # If there is a new commit, the previous jobs will be canceled
 concurrency:
@@ -37,55 +36,13 @@ jobs:
       mega_service: "${{ matrix.example }}"
       runner_label: "docker-build-${{ matrix.hardware }}"

-  Example-test:
+  example-test:
     needs: [job1, mega-image-build]
     strategy:
       matrix: ${{ fromJSON(needs.job1.outputs.run_matrix) }}
-    runs-on: ${{ matrix.hardware }}
-    continue-on-error: true
-    steps:
-      - name: Test example
-        run: |
-          echo "Matrix - example ${{ matrix.example }}, hardware ${{ matrix.hardware }}"
-
-      - name: Clean Up Working Directory
-        run: sudo rm -rf ${{github.workspace}}/*
-
-      - name: Checkout out Repo
-        uses: actions/checkout@v4
-        with:
-          ref: "refs/pull/${{ github.event.number }}/merge"
-
-      - name: Run test
-        env:
-          HUGGINGFACEHUB_API_TOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }}
-          GOOGLE_CSE_ID: ${{ secrets.GOOGLE_CSE_ID }}
-          GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }}
-          example: ${{ matrix.example }}
-          hardware: ${{ matrix.hardware }}
-          IMAGE_TAG: ${{ needs.mega-image-build.outputs.image_tag }}
-          IMAGE_REPO_GAUDI: ${{ vars.IMAGE_REPO_GAUDI }}
-          IMAGE_REPO_XEON: ${{ vars.IMAGE_REPO_XEON }}
-        run: |
-          cd ${{ github.workspace }}/$example/tests
-          if [ "$hardware" == "gaudi" ]; then IMAGE_REPO=$IMAGE_REPO_GAUDI; else IMAGE_REPO=$IMAGE_REPO_XEON; fi
-          export IMAGE_REPO=${IMAGE_REPO}
-          example_l=$(echo $example | tr '[:upper:]' '[:lower:]')
-          if [ -f test_${example_l}_on_${hardware}.sh ]; then timeout 30m bash test_${example_l}_on_${hardware}.sh; else echo "Test script not found, skip test!"; fi
-
-      - name: Clean up container
-        env:
-          example: ${{ matrix.example }}
-          hardware: ${{ matrix.hardware }}
-        if: cancelled() || failure()
-        run: |
-          cd ${{ github.workspace }}/$example/docker/$hardware
-          docker compose stop && docker compose rm -f
-          echo y | docker system prune
-
-      - name: Publish pipeline artifact
-        if: ${{ !cancelled() }}
-        uses: actions/upload-artifact@v4
-        with:
-          name: ${{ matrix.example }}-${{ matrix.hardware }}
-          path: ${{ github.workspace }}/${{ matrix.example }}/tests/*.log
+    uses: ./.github/workflows/_run-docker-compose.yml
+    with:
+      tag: ${{ needs.mega-image-build.outputs.image_tag }}
+      example: ${{ matrix.example }}
+      hardware: ${{ matrix.hardware }}
+    secrets: inherit

ChatQnA/README.md

Lines changed: 4 additions & 0 deletions

@@ -123,6 +123,10 @@ Refer to the [ChatQnA helm chart](https://github.com/opea-project/GenAIInfra/tre

 Refer to the [AI PC Guide](./docker/aipc/README.md) for instructions on deploying ChatQnA on AI PC.

+### Deploy ChatQnA on Red Hat OpenShift Container Platform (RHOCP)
+
+Refer to the [Intel Technology enabling for Openshift readme](https://github.com/intel/intel-technology-enabling-for-openshift/blob/main/workloads/opea/chatqna/README.md) for instructions to deploy ChatQnA prototype on RHOCP with [Red Hat OpenShift AI (RHOAI)](https://www.redhat.com/en/technologies/cloud-computing/openshift/openshift-ai).
+
 ## Consume ChatQnA Service

 Two ways of consuming ChatQnA Service:

ChatQnA/docker/gaudi/README.md

Lines changed: 4 additions & 4 deletions

@@ -46,7 +46,7 @@ docker build --no-cache -t opea/llm-tgi:latest --build-arg https_proxy=$https_pr
 Build vllm docker.

 ```bash
-docker build --no-cache -t vllm:hpu --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/llms/text-generation/vllm/docker/Dockerfile.hpu .
+docker build --no-cache -t opea/llm-vllm-hpu:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/llms/text-generation/vllm/docker/Dockerfile.hpu .
 ```

 Build microservice docker.
@@ -60,7 +60,7 @@ docker build --no-cache -t opea/llm-vllm:latest --build-arg https_proxy=$https_p
 Build vllm-on-ray docker.

 ```bash
-docker build --no-cache -t vllm_ray:habana --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/llms/text-generation/vllm-ray/docker/Dockerfile.vllmray .
+docker build --no-cache -t opea/llm-vllm-ray-hpu:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/llms/text-generation/vllm-ray/docker/Dockerfile.vllmray .
 ```

 Build microservice docker.
@@ -306,7 +306,7 @@ curl http://${host_ip}:8008/generate \

 ```bash
 #vLLM Service
-curl http://${your_ip}:8008/v1/completions \
+curl http://${host_ip}:8008/v1/completions \
   -H "Content-Type: application/json" \
   -d '{
   "model": "${LLM_MODEL_ID}",
@@ -318,7 +318,7 @@ curl http://${your_ip}:8008/v1/completions \

 ```bash
 #vLLM-on-Ray Service
-curl http://${your_ip}:8008/v1/chat/completions \
+curl http://${host_ip}:8008/v1/chat/completions \
   -H "Content-Type: application/json" \
   -d '{"model": "${LLM_MODEL_ID}", "messages": [{"role": "user", "content": "What is Deep Learning?"}]}'
 ```

ChatQnA/docker/gaudi/compose_vllm.yaml

Lines changed: 1 addition & 1 deletion

@@ -109,7 +109,7 @@ services:
       HF_HUB_ENABLE_HF_TRANSFER: 0
     restart: unless-stopped
   vllm-service:
-    image: vllm:hpu
+    image: opea/llm-vllm-hpu:latest
     container_name: vllm-gaudi-server
     ports:
       - "8008:80"

ChatQnA/docker/gaudi/compose_vllm_ray.yaml

Lines changed: 1 addition & 1 deletion

@@ -109,7 +109,7 @@ services:
       HF_HUB_ENABLE_HF_TRANSFER: 0
     restart: unless-stopped
   vllm-ray-service:
-    image: vllm_ray:habana
+    image: opea/llm-vllm-ray-hpu:latest
     container_name: vllm-ray-gaudi-server
     ports:
       - "8008:8000"

ChatQnA/docker/ui/svelte/src/lib/shared/components/upload/upload-knowledge.svelte

Lines changed: 1 addition & 1 deletion

@@ -43,7 +43,7 @@
       on:change={handleInput}
       class="focus:border-blue-700 focus:ring-0"
       data-testid="file-upload"
-      accept=".txt,.pdf,.json"
+      accept=".txt,.pdf,.json,.md"
     />
   </Label>
 </div>

ChatQnA/docker/xeon/README.md

Lines changed: 1 addition & 1 deletion

@@ -99,7 +99,7 @@ Build vLLM docker.
 ```bash
 git clone https://github.com/vllm-project/vllm.git
 cd ./vllm/
-docker build --no-cache -t vllm:cpu --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile.cpu .
+docker build --no-cache -t opea/vllm:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile.cpu .
 ```

 Build microservice.

ChatQnA/docker/xeon/docker_compose_qdrant.yaml renamed to ChatQnA/docker/xeon/compose_qdrant.yaml

Lines changed: 0 additions & 13 deletions

@@ -189,19 +189,6 @@ services:
       - DELETE_FILE=${DATAPREP_DELETE_FILE_ENDPOINT}
     ipc: host
     restart: always
-  chaqna-xeon-conversation-ui-server:
-    image: opea/chatqna-conversation-ui:latest
-    container_name: chatqna-xeon-conversation-ui-server
-    environment:
-      - no_proxy=${no_proxy}
-      - https_proxy=${https_proxy}
-      - http_proxy=${http_proxy}
-    ports:
-      - 5174:80
-    depends_on:
-      - chaqna-xeon-backend-server
-    ipc: host
-    restart: always

 networks:
   default:
