-
Notifications
You must be signed in to change notification settings - Fork 0
/
Dockerfile
61 lines (44 loc) · 2.3 KB
/
Dockerfile
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
FROM ubuntu:22.04
ENV PATH="/root/miniconda3/bin:${PATH}"
ARG PATH="/root/miniconda3/bin:${PATH}"
RUN apt-get update && apt-get upgrade -y
RUN apt-get install -y wget && rm -rf /var/lib/apt/lists/*
RUN wget \
https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh \
&& mkdir /root/.conda \
&& bash Miniconda3-latest-Linux-x86_64.sh -b \
&& rm -f Miniconda3-latest-Linux-x86_64.sh
RUN conda init bash
RUN pip install torch torchvision torchaudio
RUN pip install torchserve torch-model-archiver torch-workflow-archiver
RUN pip install transformers optimum
# install git
RUN apt-get update && apt-get upgrade -y && apt-get install -y --no-install-recommends \
curl git && \
apt-get clean
# install torchserve dependencies
RUN apt-get update && apt-get upgrade -y && apt-get install -y --no-install-recommends \
openjdk-17-jdk && \
apt-get clean
RUN curl -sL https://deb.nodesource.com/setup_14.x | bash -
RUN apt install -y nodejs
RUN pip install captum nvgpu
WORKDIR /workspace/bettertransformer_demo
# using wget because git clone would download tensorflow, etc. and we don't care
RUN wget -P ./distilbert-base-uncased-finetuned-sst-2-english/ https://huggingface.co/distilbert-base-uncased-finetuned-sst-2-english/resolve/main/pytorch_model.bin
RUN wget -P ./distilbert-base-uncased-finetuned-sst-2-english/ https://huggingface.co/distilbert-base-uncased-finetuned-sst-2-english/resolve/main/config.json
ADD "https://www.random.org/cgi-bin/randbyte?nbytes=10&format=h" skipcache
RUN git init . && git remote add -f origin https://github.com/fxmarty/bettertransformer_demo.git && git checkout main
ARG PROP_PATH
ARG MAR_NAME
ARG USE_BETTERTRANSFORMER="no"
ENV PROP_PATH_VAR=$PROP_PATH
ENV MAR_NAME_VAR=$MAR_NAME
ENV USE_BETTERTRANSFORMER_VAR=$USE_BETTERTRANSFORMER
RUN torch-model-archiver --model-name ${MAR_NAME_VAR} \
--version 1.0 --serialized-file distilbert-base-uncased-finetuned-sst-2-english/pytorch_model.bin \
--handler ./transformer_text_classification_handler.py \
--extra-files "distilbert-base-uncased-finetuned-sst-2-english/config.json,./setup_config.json,./index_to_name.json" \
-f \
--export-path model_store
CMD torchserve --start --model-store model_store --models my_tc=${MAR_NAME_VAR}.mar --ncs --ts-config ${PROP_PATH_VAR} && tail -f /dev/null