Dockerize (#22)
* created Dockerfile and docker-compose.yml
* Update docker-compose.yml
* added docker instructions to readme
.dockerignore (new file)
@@ -0,0 +1,6 @@
+tmp
+cache
+Dockerfile
+docker-compose.yml
+.dockerignore
+.gitignore
.gitignore
@@ -1,2 +1,3 @@
 __pycache__
 tmp
+cache
Dockerfile (new file)
@@ -0,0 +1,13 @@
+FROM pytorch/pytorch:2.4.0-cuda12.1-cudnn9-devel
+
+ENV PYTHONUNBUFFERED 1
+
+WORKDIR /usr/src/app
+
+# Install packages
+RUN apt-get update && apt-get install -y git && rm -rf /var/lib/apt/lists/*
+
+COPY requirements.txt ./
+RUN pip install --no-cache-dir -r requirements.txt
+
+COPY . .
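
For a quick smoke test without Compose, the image can also be built and run directly. This is a minimal sketch, not part of the commit: the tag `s2s` is an arbitrary choice, it assumes the NVIDIA Container Toolkit is installed on the host, and it reuses the ports, cache mount, and pipeline flags from the docker-compose.yml added below.

```bash
# Build the image from the repository root (tag "s2s" is arbitrary).
docker build -t s2s .

# Run with GPU access, publishing the receive/send ports and reusing a local
# cache directory so model weights are not re-downloaded on every run.
docker run --rm --gpus all \
  -p 12345:12345 -p 12346:12346 \
  -v "$(pwd)/cache:/root/.cache" \
  s2s python3 s2s_pipeline.py --recv_host 0.0.0.0 --send_host 0.0.0.0
```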
README.md
@@ -12,6 +12,7 @@
 - [Modularity](#modularity)
 * [Setup](#setup)
 * [Usage](#usage)
+- [Docker Server approach](#docker-server)
 - [Server/Client approach](#serverclient-approach)
 - [Local approach](#local-approach)
 * [Command-line usage](#command-line-usage)
@@ -56,6 +57,15 @@ The pipeline can be run in two ways:
 - **Server/Client approach**: Models run on a server, and audio input/output are streamed from a client.
 - **Local approach**: Uses the same client/server method but with the loopback address.
 
+### Docker Server
+
+#### Install the NVIDIA Container Toolkit
+
+https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html
+
+#### Start the docker container
+```docker compose up```
+
 ### Server/Client Approach
 
 To run the pipeline on the server:
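
Before running `docker compose up`, it can be worth confirming that the NVIDIA Container Toolkit linked above actually works. A hedged check, reusing the base image already referenced in the Dockerfile (any CUDA-enabled image would do):

```bash
# Should print the host GPUs from inside a container; if this fails, revisit
# the NVIDIA Container Toolkit install guide linked in the README.
docker run --rm --gpus all pytorch/pytorch:2.4.0-cuda12.1-cudnn9-devel nvidia-smi
```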
docker-compose.yml (new file)
@@ -0,0 +1,40 @@
+---
+version: "3.8"
+services:
+
+  pipeline:
+    build:
+      context: .
+    command:
+      - python3
+      - s2s_pipeline.py
+      - --recv_host
+      - 0.0.0.0
+      - --send_host
+      - 0.0.0.0
+      - --lm_model_name
+      - microsoft/Phi-3-mini-4k-instruct
+      - --init_chat_role
+      - system
+      - --init_chat_prompt
+      - "You are a helpful assistant"
+      - --stt_compile_mode
+      - reduce-overhead
+      - --tts_compile_mode
+      - default
+    expose:
+      - 12345/tcp
+      - 12346/tcp
+    ports:
+      - 12345:12345/tcp
+      - 12346:12346/tcp
+    volumes:
+      - ./cache/:/root/.cache/
+      - ./s2s_pipeline.py:/usr/src/app/s2s_pipeline.py
+    deploy:
+      resources:
+        reservations:
+          devices:
+            - driver: nvidia
+              device_ids: ['0']
+              capabilities: [gpu]
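
Once the stack is up, a quick way to confirm that the `pipeline` service can actually reach the GPU reserved in the `deploy` section is to exec into it. A minimal sketch, assuming the service was started with `docker compose up`:

```bash
# Prints True if PyTorch inside the running container sees the reserved GPU.
docker compose exec pipeline python3 -c "import torch; print(torch.cuda.is_available())"
```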
requirements.txt
@@ -1,4 +1,4 @@
-nltk==3.8.1
+nltk==3.9.1
 parler_tts @ git+https://github.com/huggingface/parler-tts.git
 torch==2.4.0
 sounddevice==0.5.0