update
Signed-off-by: Sertac Ozercan <[email protected]>
sozercan committed Dec 13, 2023
1 parent 6d38e2b commit cade0ee
Showing 3 changed files with 110 additions and 13 deletions.
89 changes: 89 additions & 0 deletions .github/workflows/test-docker-sd.yaml
@@ -0,0 +1,89 @@
name: docker-test-stablediffusion

on:
  push:
    branches:
      - main
    paths-ignore:
      - '**.md'
  pull_request:
    branches:
      - main
    paths-ignore:
      - '**.md'

permissions: read-all

jobs:
  test:
    runs-on: ubuntu-latest
    timeout-minutes: 240
    steps:
      - name: Harden Runner
        uses: step-security/harden-runner@eb238b55efaa70779f274895e782ed17c84f2895 # v2.6.1
        with:
          egress-policy: audit

      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1

      - name: cleanup disk space
        run: |
          docker system prune -f -a --volumes
          sudo rm -rf /usr/share/dotnet
          sudo rm -rf /opt/ghc
          sudo rm -rf "/usr/local/share/boost"
          sudo rm -rf "$AGENT_TOOLSDIRECTORY"
      # need containerd image store for testing local images
      - uses: crazy-max/ghaction-setup-docker@69b561f709cbd934060981d481ccfc316cfb77b7 # v2.2.0
        with:
          daemon-config: |
            {
              "debug": true,
              "features": {
                "containerd-snapshotter": true
              }
            }
      - uses: crazy-max/ghaction-github-runtime@b3a9207c0e1ef41f4cf215303c976869d0c2c1c4 # v3.0.0

      - name: build aikit
        run: |
          docker buildx build . -t aikit:test \
            --load --progress plain \
            --cache-from=type=gha,scope=aikit \
            --cache-to=type=gha,scope=aikit,mode=max
      - name: build test model
        run: |
          docker buildx build . -t testmodel:test \
            -f test/aikitfile-sd.yaml \
            --load --progress plain \
            --cache-from=type=gha,scope=testmodel \
            --cache-to=type=gha,scope=testmodel,mode=max
      - name: list images
        run: docker images

      - name: run test model
        run: docker run --name testmodel -d -p 8080:8080 testmodel:test

      - name: install e2e dependencies
        run: make test-e2e-dependencies

      - name: run test
        run: |
          curl http://localhost:8080/v1/chat/completions -H "Content-Type: application/json" -d '{
            "model": "llava",
            "messages": [{"role": "user", "content": [{"type":"text", "text": "What is in the image?"}, {"type": "image_url", "image_url": {"url": "https://discovery.sndimg.com/content/dam/images/discovery/fullset/2021/12/1/GettyImages-1280574445.jpg.rend.hgtvcom.966.644.suffix/1638387352203.jpeg" }}], "temperature": 0.9}]}'
      - name: save logs
        if: always()
        run: |
          docker logs testmodel > docker.log
      - name: publish logs
        if: always()
        uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # v3.1.3
        with:
          name: docker-logs
          path: docker.log
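
For local debugging outside CI, the request sent by the run test step can also be issued from a short Go program. The sketch below only mirrors the curl payload used above and assumes a test model container is already listening on localhost:8080, as started by the run test model step; it is not part of the workflow itself.

// testrequest.go - a minimal sketch that mirrors the workflow's curl call.
// Assumes a model container is already running on localhost:8080; the JSON
// payload is copied verbatim from the "run test" step above.
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	payload := []byte(`{
	  "model": "llava",
	  "messages": [{"role": "user", "content": [
	    {"type": "text", "text": "What is in the image?"},
	    {"type": "image_url", "image_url": {"url": "https://discovery.sndimg.com/content/dam/images/discovery/fullset/2021/12/1/GettyImages-1280574445.jpg.rend.hgtvcom.966.644.suffix/1638387352203.jpeg"}}
	  ], "temperature": 0.9}]
	}`)

	// Send the OpenAI-compatible chat completions request and print the reply.
	resp, err := http.Post("http://localhost:8080/v1/chat/completions", "application/json", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		panic(err)
	}
	fmt.Println(resp.Status)
	fmt.Println(string(body))
}
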
2 changes: 1 addition & 1 deletion README.md
@@ -18,7 +18,7 @@ AIKit uses [LocalAI](https://localai.io/) under-the-hood to run inference. Local
 - 🚀 [Easy to use declarative configuration](./docs/specs.md)
 - ✨ OpenAI API compatible to use with any OpenAI API compatible client
 - 🖼️ [Multi-modal model support](#vision-with-llava)
-- 🚢 Kubernetes deployment ready
+- 🚢 [Kubernetes deployment ready](#kubernetes-deployment)
 - 📦 Supports multiple models with a single image
 - 🖥️ [Supports GPU-accelerated inferencing with NVIDIA GPUs](#nvidia)
 - 🔐 [Signed images for `aikit` and pre-made models](./docs/cosign.md)
32 changes: 20 additions & 12 deletions pkg/aikit2llb/convert.go
@@ -22,31 +22,39 @@ const (
 
 func Aikit2LLB(c *config.Config) (llb.State, *specs.Image) {
     var merge llb.State
-    s := llb.Image(debianSlim)
-    s, merge = copyModels(c, s)
-    s, merge = addLocalAI(c, s, merge)
+    state := llb.Image(debianSlim)
+    base := getBaseImage(c)
+
+    state, merge = copyModels(c, base, state)
+    state, merge = addLocalAI(c, state, merge)
+
     // install cuda if runtime is nvidia
     if c.Runtime == utils.RuntimeNVIDIA {
-        merge = installCuda(s, merge)
+        merge = installCuda(state, merge)
     }
+
     // install opencv and friends if stable diffusion backend is being used
     for b := range c.Backends {
         if strings.Contains(c.Backends[b], "stablediffusion") {
-            merge = installOpenCV(s, merge)
+            merge = installOpenCV(state, merge)
         }
     }
+
     imageCfg := NewImageConfig(c)
     return merge, imageCfg
 }
 
-func copyModels(c *config.Config, s llb.State) (llb.State, llb.State) {
-    var base llb.State
+func getBaseImage(c *config.Config) llb.State {
     for b := range c.Backends {
         if strings.Contains(c.Backends[b], "stablediffusion") {
-            // due to too many dependencies, we are using debian slim as base for stable diffusion
-            base = llb.Image(debianSlim)
-        } else {
-            base = llb.Image(distrolessBase)
+            // due to too many dependencies, using debian slim as base for stable diffusion
+            return llb.Image(debianSlim)
         }
     }
+    return llb.Image(distrolessBase)
+}
+
+func copyModels(c *config.Config, base llb.State, s llb.State) (llb.State, llb.State) {
     savedState := s
 
     // create config file if defined
@@ -162,7 +170,7 @@ func addLocalAI(c *config.Config, s llb.State, merge llb.State) (llb.State, llb.State) {
     opts = append(opts, llb.Chmod(0o755))
     localAI := llb.HTTP(localAIURL, opts...)
     s = s.File(
-        llb.Copy(localAI, "local-ai", "/usr/bin"),
+        llb.Copy(localAI, "local-ai", "/usr/bin/local-ai"),
         llb.WithCustomName("Copying "+fileNameFromURL(localAIURL)+" to /usr/bin"), //nolint: goconst
     )
 
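
The behavioral change in the hunks above: the old loop reassigned base on every iteration, so with several backends the last entry decided the base image even when an earlier one was stablediffusion. The new getBaseImage returns the Debian slim base as soon as any backend name contains "stablediffusion" and falls back to distroless otherwise. The standalone sketch below illustrates that selection rule only; it works on a plain string slice and returns the name of the chosen constant rather than an llb.State, since the actual debianSlim and distrolessBase image references are defined elsewhere in convert.go and are not part of this diff.

// baseimage_sketch.go - illustrates the selection rule introduced by getBaseImage.
// Simplified stand-in: returns the name of the base-image constant instead of
// an llb.State, because the real debianSlim/distrolessBase values are outside
// this diff.
package main

import (
	"fmt"
	"strings"
)

// pickBase mirrors getBaseImage: any backend containing "stablediffusion"
// forces the Debian slim base; otherwise the distroless base is used.
func pickBase(backends []string) string {
	for _, b := range backends {
		if strings.Contains(b, "stablediffusion") {
			// stable diffusion pulls in too many dependencies for distroless
			return "debianSlim"
		}
	}
	return "distrolessBase"
}

func main() {
	fmt.Println(pickBase([]string{"llama"}))                    // distrolessBase
	fmt.Println(pickBase([]string{"llama", "stablediffusion"})) // debianSlim
}
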
