
Merge pull request #109 from defenseunicorns/build-images
pass base image tag in makefile
Gerred Dillon authored Jun 28, 2023
2 parents 1251553 + df3c6f0 commit 88fcca3
Showing 9 changed files with 30 additions and 14 deletions.
6 changes: 5 additions & 1 deletion .github/workflows/docker-build.yaml
@@ -15,4 +15,8 @@ jobs:

      - name: Run Makefile command
        # just building, not pushing them
-       run: make api base repeater stablelm whisper
+       run: make api base repeater
+     - name: install deps
+       run: pip install -r models/test/repeater/requirements.txt
+     - name: Simple Test
+       run: make test-init test teardown
2 changes: 1 addition & 1 deletion Dockerfile
@@ -7,9 +7,9 @@ ADD go.sum .
ADD api api
ADD pkg pkg
ADD api/models.toml .
-RUN go env
RUN GOOS=linux GOARCH=amd64 CGO_ENABLED=0 go build -ldflags '-extldflags "-static"' -o app api/main.go


FROM cgr.dev/chainguard/static:latest
COPY --from=build /work/app /app
COPY api/models.toml .
19 changes: 13 additions & 6 deletions Makefile
@@ -8,7 +8,6 @@ TAG ?= 0.2.0
build: api stablelm stablelm-7b embeddings whisper

push:

	docker push ghcr.io/defenseunicorns/leapfrogai/stablelm-3b:${TAG}
	docker push ghcr.io/defenseunicorns/leapfrogai/embeddings:${TAG}

@@ -22,22 +21,22 @@ api-push:

stablelm:
	cd models/llms/stablelm && \
-	docker build --network=host -t ghcr.io/defenseunicorns/leapfrogai/stablelm-3b:${TAG} .
+	docker build --network=host --build-arg IMAGE_TAG=${TAG} -t ghcr.io/defenseunicorns/leapfrogai/stablelm-3b:${TAG} .

embeddings:
	cd models/text2vec/all-minilm-l6-v2/ && \
-	docker build --network=host -t ghcr.io/defenseunicorns/leapfrogai/embeddings:${TAG} .
+	docker build --network=host --build-arg IMAGE_TAG=${TAG} -t ghcr.io/defenseunicorns/leapfrogai/embeddings:${TAG} .

whisper:
	cd models/speech2text/whisper && \
-	docker build --network=host -t ghcr.io/defenseunicorns/leapfrogai/whisper:${TAG} .
+	docker build --network=host --build-arg IMAGE_TAG=${TAG} -t ghcr.io/defenseunicorns/leapfrogai/whisper:${TAG} .

whisper-push:
	docker push ghcr.io/defenseunicorns/leapfrogai/whisper:${TAG}

repeater:
	cd models/test/repeater && \
-	docker build --network=host -t ghcr.io/defenseunicorns/leapfrogai/repeater:${TAG} .
+	docker build --network=host --build-arg IMAGE_TAG=${TAG} -t ghcr.io/defenseunicorns/leapfrogai/repeater:${TAG} .

repeater-push:
	docker push ghcr.io/defenseunicorns/leapfrogai/repeater:${TAG}
@@ -88,4 +87,12 @@ update-repeater: repeater
update-stablelm: stablelm
	docker tag ghcr.io/defenseunicorns/leapfrogai/stablelm-3b:${TAG} localhost:5001/defenseunicorns/leapfrogai/stablelm-3b:${TAG}-zarf-1442747400
	docker push localhost:5001/defenseunicorns/leapfrogai/stablelm-3b:${TAG}-zarf-1442747400
-	kubectl delete pods -n leapfrogai -l app=stablelm
+	kubectl delete pods -n leapfrogai -l app=stablelm
+
+test-init:
+	docker run -p 50051:50051 -d --rm --name repeater ghcr.io/defenseunicorns/leapfrogai/repeater:${TAG}
+
+test:
+	PYTHONPATH="." python3 models/test/repeater/test.py
+teardown:
+	docker kill repeater
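Taken together with the workflow change above, the new Makefile targets give a local equivalent of the CI smoke test. A rough sketch of the sequence, assuming Docker is available, port 50051 is free, and TAG is left at its 0.2.0 default:

# build the images the CI job builds
make api base repeater
# install the test client's dependencies
pip install -r models/test/repeater/requirements.txt
# start the repeater container, run the gRPC round-trip test, then stop it
make test-init test teardown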
3 changes: 2 additions & 1 deletion models/llms/stablelm/Dockerfile
@@ -1,4 +1,5 @@
-FROM ghcr.io/defenseunicorns/leapfrogai/base:0.0.4
+ARG IMAGE_TAG
+FROM ghcr.io/defenseunicorns/leapfrogai/base:${IMAGE_TAG}

# Install project
COPY requirements.txt .
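The ARG-before-FROM change above (repeated in the whisper, repeater, and embeddings Dockerfiles below) works because a build argument declared before FROM can be substituted into the base image reference, so the --build-arg IMAGE_TAG=${TAG} the Makefile now passes selects which base tag is pulled. A rough equivalent of what the stablelm target now runs, assuming the default TAG of 0.2.0 and a base image published at that tag:

docker build --network=host --build-arg IMAGE_TAG=0.2.0 \
  -t ghcr.io/defenseunicorns/leapfrogai/stablelm-3b:0.2.0 models/llms/stablelm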
3 changes: 2 additions & 1 deletion models/speech2text/whisper/Dockerfile
@@ -1,4 +1,5 @@
-FROM ghcr.io/defenseunicorns/leapfrogai/base:0.0.4
+ARG IMAGE_TAG
+FROM ghcr.io/defenseunicorns/leapfrogai/base:${IMAGE_TAG}
# Install project

COPY --chown=user:user main.py .
3 changes: 2 additions & 1 deletion models/test/repeater/Dockerfile
@@ -1,4 +1,5 @@
-FROM ghcr.io/defenseunicorns/leapfrogai/base:0.0.4
+ARG IMAGE_TAG
+FROM ghcr.io/defenseunicorns/leapfrogai/base:${IMAGE_TAG}

COPY requirements.txt .
RUN pip3 install -r requirements.txt
2 changes: 1 addition & 1 deletion models/test/repeater/repeater.py
@@ -5,7 +5,7 @@ class Repeater(leapfrog.CompletionServiceServicer):
    def Complete(self, request: leapfrog.CompletionRequest, context: leapfrog.GrpcContext) -> leapfrog.CompletionResponse:
        result = request.prompt # just returns what's provided
        print(f"Repeater.Complete: { request }")
-       return leapfrog.CompletionResponse(completion=[result for _ in range(request.n)])
+       return leapfrog.CompletionResponse(completion=result)

    def CreateEmbedding(self, request, context):
        return leapfrog.EmbeddingResponse(
3 changes: 2 additions & 1 deletion models/test/repeater/test.py
@@ -9,13 +9,14 @@

def run():
    # Set up a channel to the server
-   with grpc.insecure_channel('localhost:50051') as channel:
+   with grpc.insecure_channel('127.0.0.1:50051') as channel:
        # Instantiate a stub (client)
        stub = leapfrog.CompletionServiceStub(channel)

        # Create a request
        request = leapfrog.CompletionRequest(
            prompt="Hello, Chatbot!",
+           max_tokens=150,
            # add other parameters as necessary
        )

3 changes: 2 additions & 1 deletion models/text2vec/all-minilm-l6-v2/Dockerfile
@@ -1,4 +1,5 @@
-FROM ghcr.io/defenseunicorns/leapfrogai/base:0.0.4
+ARG IMAGE_TAG
+FROM ghcr.io/defenseunicorns/leapfrogai/base:${IMAGE_TAG}
# Install project
COPY . .
RUN pip install --no-cache-dir --user -r requirements.txt
