Skip to content

Commit 95eb72b

Browse files
authored
feat: add 🐸 coqui (#1489)
* feat: add coqui * docs: update news
1 parent 7e2d101 commit 95eb72b

File tree

15 files changed

+706
-100
lines changed

15 files changed

+706
-100
lines changed

.github/workflows/test-extra.yml

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -248,3 +248,30 @@ jobs:
248248
export PATH=$PATH:/opt/conda/bin
249249
make -C backend/python/vall-e-x
250250
make -C backend/python/vall-e-x test
251+
252+
tests-coqui:
253+
runs-on: ubuntu-latest
254+
steps:
255+
- name: Clone
256+
uses: actions/checkout@v4
257+
with:
258+
submodules: true
259+
- name: Dependencies
260+
run: |
261+
sudo apt-get update
262+
sudo apt-get install build-essential ffmpeg
263+
curl https://repo.anaconda.com/pkgs/misc/gpgkeys/anaconda.asc | gpg --dearmor > conda.gpg && \
264+
sudo install -o root -g root -m 644 conda.gpg /usr/share/keyrings/conda-archive-keyring.gpg && \
265+
gpg --keyring /usr/share/keyrings/conda-archive-keyring.gpg --no-default-keyring --fingerprint 34161F5BF5EB1D4BFBBB8F0A8AEB4F8B29D82806 && \
266+
sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" > /etc/apt/sources.list.d/conda.list' && \
267+
sudo /bin/bash -c 'echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" | tee -a /etc/apt/sources.list.d/conda.list' && \
268+
sudo apt-get update && \
269+
sudo apt-get install -y conda
270+
sudo apt-get install -y ca-certificates cmake curl patch espeak espeak-ng
271+
sudo rm -rfv /usr/bin/conda || true
272+
273+
- name: Test coqui
274+
run: |
275+
export PATH=$PATH:/opt/conda/bin
276+
make -C backend/python/coqui
277+
make -C backend/python/coqui test

Dockerfile

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ ARG TARGETVARIANT
1313

1414
ENV BUILD_TYPE=${BUILD_TYPE}
1515

16-
ENV EXTERNAL_GRPC_BACKENDS="huggingface-embeddings:/build/backend/python/sentencetransformers/run.sh,petals:/build/backend/python/petals/run.sh,transformers:/build/backend/python/transformers/run.sh,sentencetransformers:/build/backend/python/sentencetransformers/run.sh,autogptq:/build/backend/python/autogptq/run.sh,bark:/build/backend/python/bark/run.sh,diffusers:/build/backend/python/diffusers/run.sh,exllama:/build/backend/python/exllama/run.sh,vall-e-x:/build/backend/python/vall-e-x/run.sh,vllm:/build/backend/python/vllm/run.sh,exllama2:/build/backend/python/exllama2/run.sh,transformers-musicgen:/build/backend/python/transformers-musicgen/run.sh"
16+
ENV EXTERNAL_GRPC_BACKENDS="coqui:/build/backend/python/coqui/run.sh,huggingface-embeddings:/build/backend/python/sentencetransformers/run.sh,petals:/build/backend/python/petals/run.sh,transformers:/build/backend/python/transformers/run.sh,sentencetransformers:/build/backend/python/sentencetransformers/run.sh,autogptq:/build/backend/python/autogptq/run.sh,bark:/build/backend/python/bark/run.sh,diffusers:/build/backend/python/diffusers/run.sh,exllama:/build/backend/python/exllama/run.sh,vall-e-x:/build/backend/python/vall-e-x/run.sh,vllm:/build/backend/python/vllm/run.sh,exllama2:/build/backend/python/exllama2/run.sh,transformers-musicgen:/build/backend/python/transformers-musicgen/run.sh"
1717

1818
ENV GALLERIES='[{"name":"model-gallery", "url":"github:go-skynet/model-gallery/index.yaml"}, {"url": "github:go-skynet/model-gallery/huggingface.yaml","name":"huggingface"}]'
1919
ARG GO_TAGS="stablediffusion tts"
@@ -69,10 +69,7 @@ RUN curl https://repo.anaconda.com/pkgs/misc/gpgkeys/anaconda.asc | gpg --dearmo
6969
ENV PATH="/root/.cargo/bin:${PATH}"
7070
RUN pip install --upgrade pip
7171
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
72-
73-
74-
# \
75-
# ; fi
72+
RUN apt-get install -y espeak-ng espeak
7673

7774
###################################
7875
###################################
@@ -192,6 +189,9 @@ RUN if [ "${IMAGE_TYPE}" = "extras" ]; then \
192189
RUN if [ "${IMAGE_TYPE}" = "extras" ]; then \
193190
PATH=$PATH:/opt/conda/bin make -C backend/python/transformers-musicgen \
194191
; fi
192+
RUN if [ "${IMAGE_TYPE}" = "extras" ]; then \
193+
PATH=$PATH:/opt/conda/bin make -C backend/python/coqui \
194+
; fi
195195

196196
# Define the health check command
197197
HEALTHCHECK --interval=1m --timeout=10m --retries=10 \

Makefile

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -395,6 +395,7 @@ protogen-python:
395395
python3 -m grpc_tools.protoc -Ibackend/ --python_out=backend/python/exllama/ --grpc_python_out=backend/python/exllama/ backend/backend.proto
396396
python3 -m grpc_tools.protoc -Ibackend/ --python_out=backend/python/bark/ --grpc_python_out=backend/python/bark/ backend/backend.proto
397397
python3 -m grpc_tools.protoc -Ibackend/ --python_out=backend/python/diffusers/ --grpc_python_out=backend/python/diffusers/ backend/backend.proto
398+
python3 -m grpc_tools.protoc -Ibackend/ --python_out=backend/python/coqui/ --grpc_python_out=backend/python/coqui/ backend/backend.proto
398399
python3 -m grpc_tools.protoc -Ibackend/ --python_out=backend/python/vall-e-x/ --grpc_python_out=backend/python/vall-e-x/ backend/backend.proto
399400
python3 -m grpc_tools.protoc -Ibackend/ --python_out=backend/python/vllm/ --grpc_python_out=backend/python/vllm/ backend/backend.proto
400401
python3 -m grpc_tools.protoc -Ibackend/ --python_out=backend/python/petals/ --grpc_python_out=backend/python/petals/ backend/backend.proto
@@ -405,6 +406,7 @@ protogen-python:
405406
prepare-extra-conda-environments:
406407
$(MAKE) -C backend/python/autogptq
407408
$(MAKE) -C backend/python/bark
409+
$(MAKE) -C backend/python/coqui
408410
$(MAKE) -C backend/python/diffusers
409411
$(MAKE) -C backend/python/vllm
410412
$(MAKE) -C backend/python/sentencetransformers

README.md

Lines changed: 7 additions & 47 deletions
Original file line numberDiff line numberDiff line change
@@ -26,10 +26,6 @@
2626
2727
[![tests](https://github.com/go-skynet/LocalAI/actions/workflows/test.yml/badge.svg)](https://github.com/go-skynet/LocalAI/actions/workflows/test.yml)[![Build and Release](https://github.com/go-skynet/LocalAI/actions/workflows/release.yaml/badge.svg)](https://github.com/go-skynet/LocalAI/actions/workflows/release.yaml)[![build container images](https://github.com/go-skynet/LocalAI/actions/workflows/image.yml/badge.svg)](https://github.com/go-skynet/LocalAI/actions/workflows/image.yml)[![Bump dependencies](https://github.com/go-skynet/LocalAI/actions/workflows/bump_deps.yaml/badge.svg)](https://github.com/go-skynet/LocalAI/actions/workflows/bump_deps.yaml)[![Artifact Hub](https://img.shields.io/endpoint?url=https://artifacthub.io/badge/repository/localai)](https://artifacthub.io/packages/search?repo=localai)
2828

29-
**LocalAI** is the free, Open Source OpenAI alternative. LocalAI acts as a drop-in replacement REST API that’s compatible with OpenAI API specifications for local inferencing. It allows you to run LLMs, generate images, audio (and not only) locally or on-prem with consumer grade hardware, supporting multiple model families. Does not require GPU.
30-
31-
<p align="center"><b>Follow LocalAI </b></p>
32-
3329
<p align="center">
3430
<a href="https://twitter.com/LocalAI_API" target="blank">
3531
<img src="https://img.shields.io/twitter/follow/LocalAI_API?label=Follow: LocalAI_API&style=social" alt="Follow LocalAI_API"/>
@@ -38,61 +34,25 @@
3834
<img src="https://dcbadge.vercel.app/api/server/uJAeKSAGDy?style=flat-square&theme=default-inverted" alt="Join LocalAI Discord Community"/>
3935
</a>
4036

41-
<p align="center"><b>Connect with the Creator </b></p>
42-
43-
<p align="center">
44-
<a href="https://twitter.com/mudler_it" target="blank">
45-
<img src="https://img.shields.io/twitter/follow/mudler_it?label=Follow: mudler_it&style=social" alt="Follow mudler_it"/>
46-
</a>
47-
<a href='https://github.com/mudler'>
48-
<img alt="Follow on Github" src="https://img.shields.io/badge/Follow-mudler-black?logo=github&link=https%3A%2F%2Fgithub.com%2Fmudler">
49-
</a>
50-
</p>
51-
52-
<p align="center"><b>Share LocalAI Repository</b></p>
53-
54-
<p align="center">
55-
56-
<a href="https://twitter.com/intent/tweet?text=Check%20this%20GitHub%20repository%20out.%20LocalAI%20-%20Let%27s%20you%20easily%20run%20LLM%20locally.&url=https://github.com/go-skynet/LocalAI&hashtags=LocalAI,AI" target="blank">
57-
<img src="https://img.shields.io/twitter/follow/_LocalAI?label=Share Repo on Twitter&style=social" alt="Follow _LocalAI"/></a>
58-
<a href="https://t.me/share/url?text=Check%20this%20GitHub%20repository%20out.%20LocalAI%20-%20Let%27s%20you%20easily%20run%20LLM%20locally.&url=https://github.com/go-skynet/LocalAI" target="_blank"><img src="https://img.shields.io/twitter/url?label=Telegram&logo=Telegram&style=social&url=https://github.com/go-skynet/LocalAI" alt="Share on Telegram"/></a>
59-
<a href="https://api.whatsapp.com/send?text=Check%20this%20GitHub%20repository%20out.%20LocalAI%20-%20Let%27s%20you%20easily%20run%20LLM%20locally.%20https://github.com/go-skynet/LocalAI"><img src="https://img.shields.io/twitter/url?label=whatsapp&logo=whatsapp&style=social&url=https://github.com/go-skynet/LocalAI" /></a> <a href="https://www.reddit.com/submit?url=https://github.com/go-skynet/LocalAI&title=Check%20this%20GitHub%20repository%20out.%20LocalAI%20-%20Let%27s%20you%20easily%20run%20LLM%20locally.
60-
" target="blank">
61-
<img src="https://img.shields.io/twitter/url?label=Reddit&logo=Reddit&style=social&url=https://github.com/go-skynet/LocalAI" alt="Share on Reddit"/>
62-
</a> <a href="mailto:?subject=Check%20this%20GitHub%20repository%20out.%20LocalAI%20-%20Let%27s%20you%20easily%20run%20LLM%20locally.%3A%0Ahttps://github.com/go-skynet/LocalAI" target="_blank"><img src="https://img.shields.io/twitter/url?label=Gmail&logo=Gmail&style=social&url=https://github.com/go-skynet/LocalAI"/></a> <a href="https://www.buymeacoffee.com/mudler" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/default-orange.png" alt="Buy Me A Coffee" height="23" width="100" style="border-radius:1px"></a>
63-
64-
</p>
65-
66-
## 💻 [Getting started](https://localai.io/basics/getting_started/index.html)
37+
**LocalAI** is the free, Open Source OpenAI alternative. LocalAI acts as a drop-in replacement REST API that’s compatible with OpenAI API specifications for local inferencing. It allows you to run LLMs, generate images, audio (and not only) locally or on-prem with consumer grade hardware, supporting multiple model families. Does not require GPU.
6738

6839
## 🔥🔥 Hot topics / Roadmap
6940

7041
[Roadmap](https://github.com/mudler/LocalAI/issues?q=is%3Aissue+is%3Aopen+label%3Aroadmap)
7142

72-
🆕 New! [LLM finetuning guide](https://localai.io/advanced/fine-tuning/)
43+
- 🐸 Coqui: https://github.com/mudler/LocalAI/pull/1489
44+
- Inline templates: https://github.com/mudler/LocalAI/pull/1452
45+
- Mixtral: https://github.com/mudler/LocalAI/pull/1449
46+
- Img2vid https://github.com/mudler/LocalAI/pull/1442
47+
- Musicgen https://github.com/mudler/LocalAI/pull/1387
7348

7449
Hot topics (looking for contributors):
7550
- Backends v2: https://github.com/mudler/LocalAI/issues/1126
7651
- Improving UX v2: https://github.com/mudler/LocalAI/issues/1373
7752

7853
If you want to help and contribute, issues up for grabs: https://github.com/mudler/LocalAI/issues?q=is%3Aissue+is%3Aopen+label%3A%22up+for+grabs%22
7954

80-
81-
82-
<hr>
83-
84-
In a nutshell:
85-
86-
- Local, OpenAI drop-in alternative REST API. You own your data.
87-
- NO GPU required. NO Internet access is required either
88-
- Optional, GPU Acceleration is available in `llama.cpp`-compatible LLMs. See also the [build section](https://localai.io/basics/build/index.html).
89-
- Supports multiple models
90-
- 🏃 Once loaded the first time, it keeps models loaded in memory for faster inference
91-
- ⚡ Doesn't shell-out, but uses C++ bindings for a faster inference and better performance.
92-
93-
LocalAI was created by [Ettore Di Giacinto](https://github.com/mudler/) and is a community-driven project, focused on making the AI accessible to anyone. Any contribution, feedback and PR is welcome!
94-
95-
Note that this started just as a [fun weekend project](https://localai.io/#backstory) in order to try to create the necessary pieces for a full AI assistant like `ChatGPT`: the community is growing fast and we are working hard to make it better and more stable. If you want to help, please consider contributing (see below)!
55+
## 💻 [Getting started](https://localai.io/basics/getting_started/index.html)
9656

9757
## 🚀 [Features](https://localai.io/features/)
9858

backend/python/common-env/transformers/transformers-nvidia.yml

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@ dependencies:
5353
- mpmath==1.3.0
5454
- multidict==6.0.4
5555
- multiprocess==0.70.15
56-
- networkx==3.1
56+
- networkx
5757
- numpy==1.26.0
5858
- nvidia-cublas-cu12==12.1.3.1
5959
- nvidia-cuda-cupti-cu12==12.1.105
@@ -68,7 +68,7 @@ dependencies:
6868
- nvidia-nvjitlink-cu12==12.2.140
6969
- nvidia-nvtx-cu12==12.1.105
7070
- packaging==23.2
71-
- pandas==2.1.1
71+
- pandas
7272
- peft==0.5.0
7373
- git+https://github.com/bigscience-workshop/petals
7474
- protobuf==4.24.4
@@ -90,6 +90,7 @@ dependencies:
9090
- torchaudio==2.1.0
9191
- tqdm==4.66.1
9292
- transformers==4.34.0
93+
- TTS==0.22.0
9394
- triton==2.1.0
9495
- typing-extensions==4.8.0
9596
- tzdata==2023.3

backend/python/common-env/transformers/transformers.yml

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@ dependencies:
3333
- boto3==1.28.61
3434
- botocore==1.31.61
3535
- certifi==2023.7.22
36+
- TTS==0.22.0
3637
- charset-normalizer==3.3.0
3738
- datasets==2.14.5
3839
- sentence-transformers==2.2.2
@@ -53,10 +54,10 @@ dependencies:
5354
- mpmath==1.3.0
5455
- multidict==6.0.4
5556
- multiprocess==0.70.15
56-
- networkx==3.1
57+
- networkx
5758
- numpy==1.26.0
5859
- packaging==23.2
59-
- pandas==2.1.1
60+
- pandas
6061
- peft==0.5.0
6162
- git+https://github.com/bigscience-workshop/petals
6263
- protobuf==4.24.4

backend/python/coqui/Makefile

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
.PHONY: coqui
2+
coqui:
3+
$(MAKE) -C ../common-env/transformers
4+
5+
.PHONY: run
6+
run:
7+
@echo "Running coqui..."
8+
bash run.sh
9+
@echo "coqui run."
10+
11+
.PHONY: test
12+
test:
13+
@echo "Testing coqui..."
14+
bash test.sh
15+
@echo "coqui tested."

backend/python/coqui/README.md

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
# Creating a separate environment for the coqui project
2+
3+
```
4+
make coqui
5+
```
6+
7+
# Testing the gRPC server
8+
9+
```
10+
make test
11+
```

0 commit comments

Comments
 (0)