Skip to content

Commit 96a6f55

Browse files
author
Joan Martinez
committed
Merge branch 'master' of https://github.com/JoanFM/llama.cpp into feat-jina-v2-base-code
2 parents 9a65c7a + 16926df commit 96a6f55

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

59 files changed

+1173
-1016
lines changed

.devops/full-cuda.Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,6 @@ ENV LLAMA_CUDA=1
3131
# Enable cURL
3232
ENV LLAMA_CURL=1
3333

34-
RUN make
34+
RUN make -j$(nproc)
3535

3636
ENTRYPOINT ["/app/.devops/tools.sh"]

.devops/full-rocm.Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,6 @@ ENV LLAMA_CURL=1
4545
RUN apt-get update && \
4646
apt-get install -y libcurl4-openssl-dev
4747

48-
RUN make
48+
RUN make -j$(nproc)
4949

5050
ENTRYPOINT ["/app/.devops/tools.sh"]

.devops/full.Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ COPY . .
1818
ENV LLAMA_CURL=1
1919

2020

21-
RUN make
21+
RUN make -j$(nproc)
2222

2323
ENV LC_ALL=C.utf8
2424

.devops/main-cuda.Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ ENV CUDA_DOCKER_ARCH=${CUDA_DOCKER_ARCH}
2323
# Enable CUDA
2424
ENV LLAMA_CUDA=1
2525

26-
RUN make
26+
RUN make -j$(nproc)
2727

2828
FROM ${BASE_CUDA_RUN_CONTAINER} as runtime
2929

.devops/main-intel.Dockerfile

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,14 @@ ARG ONEAPI_VERSION=2024.0.1-devel-ubuntu22.04
22

33
FROM intel/oneapi-basekit:$ONEAPI_VERSION as build
44

5+
RUN wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB | gpg --dearmor | tee /usr/share/keyrings/intel-oneapi-archive-keyring.gpg > /dev/null && \
6+
echo "deb [signed-by=/usr/share/keyrings/intel-oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main " | tee /etc/apt/sources.list.d/oneAPI.list && \
7+
chmod 644 /usr/share/keyrings/intel-oneapi-archive-keyring.gpg && \
8+
rm /etc/apt/sources.list.d/intel-graphics.list && \
9+
wget -O- https://repositories.intel.com/graphics/intel-graphics.key | gpg --dearmor | tee /usr/share/keyrings/intel-graphics.gpg > /dev/null && \
10+
echo "deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/graphics/ubuntu jammy arc" | tee /etc/apt/sources.list.d/intel.gpu.jammy.list && \
11+
chmod 644 /usr/share/keyrings/intel-graphics.gpg
12+
513
ARG LLAMA_SYCL_F16=OFF
614
RUN apt-get update && \
715
apt-get install -y git

.devops/main-rocm.Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -40,6 +40,6 @@ ENV LLAMA_HIPBLAS=1
4040
ENV CC=/opt/rocm/llvm/bin/clang
4141
ENV CXX=/opt/rocm/llvm/bin/clang++
4242

43-
RUN make
43+
RUN make -j$(nproc)
4444

4545
ENTRYPOINT [ "/app/main" ]

.devops/main.Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ WORKDIR /app
99

1010
COPY . .
1111

12-
RUN make
12+
RUN make -j$(nproc)
1313

1414
FROM ubuntu:$UBUNTU_VERSION as runtime
1515

.devops/server-cuda.Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ ENV LLAMA_CUDA=1
2525
# Enable cURL
2626
ENV LLAMA_CURL=1
2727

28-
RUN make
28+
RUN make -j$(nproc)
2929

3030
FROM ${BASE_CUDA_RUN_CONTAINER} as runtime
3131

.devops/server-intel.Dockerfile

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,14 @@ ARG ONEAPI_VERSION=2024.0.1-devel-ubuntu22.04
22

33
FROM intel/oneapi-basekit:$ONEAPI_VERSION as build
44

5+
RUN wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB | gpg --dearmor | tee /usr/share/keyrings/intel-oneapi-archive-keyring.gpg > /dev/null && \
6+
echo "deb [signed-by=/usr/share/keyrings/intel-oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main " | tee /etc/apt/sources.list.d/oneAPI.list && \
7+
chmod 644 /usr/share/keyrings/intel-oneapi-archive-keyring.gpg && \
8+
rm /etc/apt/sources.list.d/intel-graphics.list && \
9+
wget -O- https://repositories.intel.com/graphics/intel-graphics.key | gpg --dearmor | tee /usr/share/keyrings/intel-graphics.gpg > /dev/null && \
10+
echo "deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/graphics/ubuntu jammy arc" | tee /etc/apt/sources.list.d/intel.gpu.jammy.list && \
11+
chmod 644 /usr/share/keyrings/intel-graphics.gpg
12+
513
ARG LLAMA_SYCL_F16=OFF
614
RUN apt-get update && \
715
apt-get install -y git libcurl4-openssl-dev
@@ -19,6 +27,14 @@ RUN if [ "${LLAMA_SYCL_F16}" = "ON" ]; then \
1927

2028
FROM intel/oneapi-basekit:$ONEAPI_VERSION as runtime
2129

30+
RUN wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB | gpg --dearmor | tee /usr/share/keyrings/intel-oneapi-archive-keyring.gpg > /dev/null && \
31+
echo "deb [signed-by=/usr/share/keyrings/intel-oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main " | tee /etc/apt/sources.list.d/oneAPI.list && \
32+
chmod 644 /usr/share/keyrings/intel-oneapi-archive-keyring.gpg && \
33+
rm /etc/apt/sources.list.d/intel-graphics.list && \
34+
wget -O- https://repositories.intel.com/graphics/intel-graphics.key | gpg --dearmor | tee /usr/share/keyrings/intel-graphics.gpg > /dev/null && \
35+
echo "deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/graphics/ubuntu jammy arc" | tee /etc/apt/sources.list.d/intel.gpu.jammy.list && \
36+
chmod 644 /usr/share/keyrings/intel-graphics.gpg
37+
2238
RUN apt-get update && \
2339
apt-get install -y libcurl4-openssl-dev
2440

.devops/server-rocm.Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,6 @@ ENV LLAMA_CURL=1
4545
RUN apt-get update && \
4646
apt-get install -y libcurl4-openssl-dev
4747

48-
RUN make
48+
RUN make -j$(nproc)
4949

5050
ENTRYPOINT [ "/app/server" ]

.devops/server.Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ COPY . .
1111

1212
ENV LLAMA_CURL=1
1313

14-
RUN make
14+
RUN make -j$(nproc)
1515

1616
FROM ubuntu:$UBUNTU_VERSION as runtime
1717

.devops/tools.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ arg1="$1"
88
shift
99

1010
if [[ "$arg1" == '--convert' || "$arg1" == '-c' ]]; then
11-
python3 ./convert.py "$@"
11+
python3 ./convert-hf-to-gguf.py "$@"
1212
elif [[ "$arg1" == '--quantize' || "$arg1" == '-q' ]]; then
1313
./quantize "$@"
1414
elif [[ "$arg1" == '--run' || "$arg1" == '-r' ]]; then

.github/ISSUE_TEMPLATE/06-question.yml

Lines changed: 0 additions & 38 deletions
This file was deleted.
Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,52 @@
1+
name: Research
2+
description: Track new technical research area
3+
title: "Research: "
4+
labels: ["research 🔬"]
5+
body:
6+
- type: markdown
7+
attributes:
8+
value: |
9+
Don't forget to check for any [duplicate research issue tickets](https://github.com/ggerganov/llama.cpp/issues?q=is%3Aopen+is%3Aissue+label%3A%22research+%F0%9F%94%AC%22)
10+
11+
- type: checkboxes
12+
id: research-stage
13+
attributes:
14+
label: Research Stage
15+
description: Track general state of this research ticket
16+
options:
17+
- label: Background Research (Let's try to avoid reinventing the wheel)
18+
- label: Hypothesis Formed (How do you think this will work and its effect?)
19+
- label: Strategy / Implementation Forming
20+
- label: Analysis of results
21+
- label: Debrief / Documentation (So people in the future can learn from us)
22+
23+
- type: textarea
24+
id: background
25+
attributes:
26+
label: Previous existing literature and research
27+
description: What's the current state of the art and what's the motivation for this research?
28+
29+
- type: textarea
30+
id: hypothesis
31+
attributes:
32+
label: Hypothesis
33+
description: How do you think this will work and its effect?
34+
35+
- type: textarea
36+
id: implementation
37+
attributes:
38+
label: Implementation
39+
description: Got an approach? e.g. a PR ready to go?
40+
41+
- type: textarea
42+
id: analysis
43+
attributes:
44+
label: Analysis
45+
description: How does the proposed implementation behave?
46+
47+
- type: textarea
48+
id: logs
49+
attributes:
50+
label: Relevant log output
51+
description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
52+
render: shell

.github/ISSUE_TEMPLATE/config.yml

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
blank_issues_enabled: true
2+
contact_links:
3+
- name: Got an idea?
4+
url: https://github.com/ggerganov/llama.cpp/discussions/categories/ideas
5+
about: Pop it there. It may then become an enhancement ticket.
6+
- name: Got a question?
7+
url: https://github.com/ggerganov/llama.cpp/discussions/categories/q-a
8+
about: Ask a question there!
9+
- name: Want to contribute?
10+
url: https://github.com/ggerganov/llama.cpp/wiki/contribute
11+
about: Head to the contribution guide page of the wiki for areas you can help with
12+
13+

.github/workflows/docker.yml

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -42,9 +42,8 @@ jobs:
4242
- { tag: "light-rocm", dockerfile: ".devops/main-rocm.Dockerfile", platforms: "linux/amd64,linux/arm64" }
4343
- { tag: "full-rocm", dockerfile: ".devops/full-rocm.Dockerfile", platforms: "linux/amd64,linux/arm64" }
4444
- { tag: "server-rocm", dockerfile: ".devops/server-rocm.Dockerfile", platforms: "linux/amd64,linux/arm64" }
45-
# TODO: Disabled due to build issues https://github.com/ggerganov/llama.cpp/issues/7507
46-
#- { tag: "light-intel", dockerfile: ".devops/main-intel.Dockerfile", platforms: "linux/amd64" }
47-
#- { tag: "server-intel", dockerfile: ".devops/server-intel.Dockerfile", platforms: "linux/amd64" }
45+
- { tag: "light-intel", dockerfile: ".devops/main-intel.Dockerfile", platforms: "linux/amd64" }
46+
- { tag: "server-intel", dockerfile: ".devops/server-intel.Dockerfile", platforms: "linux/amd64" }
4847
steps:
4948
- name: Check out the repo
5049
uses: actions/checkout@v4

CMakeLists.txt

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -628,6 +628,10 @@ if (LLAMA_SYCL)
628628
add_compile_definitions(GGML_SYCL_F16)
629629
endif()
630630

631+
if (LLAMA_CUDA_FORCE_MMQ)
632+
add_compile_definitions(GGML_SYCL_FORCE_MMQ)
633+
endif()
634+
631635
add_compile_options(-I./) #include DPCT
632636
add_compile_options(-I/${SYCL_INCLUDE_DIR})
633637

@@ -1310,7 +1314,7 @@ set_target_properties(llama PROPERTIES PUBLIC_HEADER ${CMAKE_CURRENT_SOURCE_DIR}
13101314
install(TARGETS llama LIBRARY PUBLIC_HEADER)
13111315

13121316
install(
1313-
FILES convert.py
1317+
FILES convert-hf-to-gguf.py
13141318
PERMISSIONS
13151319
OWNER_READ
13161320
OWNER_WRITE

README-sycl.md

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -54,10 +54,10 @@ It has the similar design of other llama.cpp BLAS-based paths such as *OpenBLAS,
5454

5555
## OS
5656

57-
| OS | Status | Verified |
58-
|---------|---------|------------------------------------|
59-
| Linux | Support | Ubuntu 22.04, Fedora Silverblue 39 |
60-
| Windows | Support | Windows 11 |
57+
| OS | Status | Verified |
58+
|---------|---------|------------------------------------------------|
59+
| Linux | Support | Ubuntu 22.04, Fedora Silverblue 39, Arch Linux |
60+
| Windows | Support | Windows 11 |
6161

6262

6363
## Hardware
@@ -70,7 +70,7 @@ It has the similar design of other llama.cpp BLAS-based paths such as *OpenBLAS,
7070
|-------------------------------|---------|---------------------------------------|
7171
| Intel Data Center Max Series | Support | Max 1550, 1100 |
7272
| Intel Data Center Flex Series | Support | Flex 170 |
73-
| Intel Arc Series | Support | Arc 770, 730M |
73+
| Intel Arc Series | Support | Arc 770, 730M, Arc A750 |
7474
| Intel built-in Arc GPU | Support | built-in Arc GPU in Meteor Lake |
7575
| Intel iGPU | Support | iGPU in i5-1250P, i7-1260P, i7-1165G7 |
7676

0 commit comments

Comments
 (0)