rebase
sixianyi0721 committed Jan 17, 2025
1 parent 0fefd43 commit 80afa68
Showing 3 changed files with 151 additions and 45 deletions.
80 changes: 80 additions & 0 deletions .github/workflows/ci-test.yml
@@ -0,0 +1,80 @@
name: Integration-test

on:
  workflow_dispatch:
  push:

run-name:
  "CI test"

env:
  TESTS_PATH: "${{ github.workspace }}/llama_stack/providers/tests"

jobs:
  build:
    name: Build
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.x'


  run_tests:
    name: Run tests
    runs-on: ubuntu-latest
    needs:
      build
    steps:
      - name: "Checkout 'meta-llama/llama-stack' repository"
        uses: actions/checkout@v3

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.x'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
          pip install -e .
          pip install -U \
            torch torchvision \
            pytest pytest_asyncio \
            fairscale lm-format-enforcer \
            zmq chardet pypdf \
            pandas sentence_transformers together \
            aiosqlite groq fireworks-ai \
            pytest-asyncio
      # - name: Install Ollama
      #   run: |
      #     curl -fsSL https://ollama.com/install.sh | sh

      # - name: spin up ollama server
      #   run: |
      #     ollama run llama3.1:8b-instruct-fp16
      #     sleep 10

      - name: Run integration test
        working-directory: "${{ github.workspace }}"
        run: |
          export FIREWORKS_API_KEY=${{ secrets.FIREWORKS_API_KEY }}
          export TOGETHER_API_KEY=${{ secrets.TOGETHER_API_KEY }}
          echo "Current directory: $(pwd)"
          echo "Repository root: ${{ github.workspace }}"
          echo "Branch: ${{ github.ref }}"
          echo "List of tests"
          pytest ${{ github.workspace }}/llama_stack/providers/tests/ -k "fireworks and together" --config=github_ci_test_config.yaml
      # - name: Upload pytest test results
      #   uses: actions/upload-artifact@v4
      #   with:
      #     name: pytest_report.md
      #     path: pytest_report.md
      #   # Use always() to always run this step to publish test results when there are test failures
      #   if: ${{ always() }}
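The "Run integration test" step above can be approximated locally before pushing. A minimal sketch, assuming the repository root as the working directory, an editable install, and the provider API keys exported in the shell (the key values below are placeholders you supply yourself):

# Rough local equivalent of the CI integration-test step; not the exact runner environment.
pip install -e . pytest pytest-asyncio
export FIREWORKS_API_KEY="<your-fireworks-key>"   # placeholder
export TOGETHER_API_KEY="<your-together-key>"     # placeholder
pytest llama_stack/providers/tests/ \
  -k "fireworks and together" \
  --config=github_ci_test_config.yaml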
97 changes: 52 additions & 45 deletions .github/workflows/publish-to-test-pypi.yml
@@ -1,14 +1,15 @@
 name: Publish Python 🐍 distribution 📦 to TestPyPI
 
 on:
-  workflow_dispatch: # Keep manual trigger
-    inputs:
-      version:
-        description: 'Version number (e.g. 0.0.63.dev20250111)'
-        required: true
-        type: string
-  schedule:
-    - cron: "0 0 * * *" # Run every day at midnight
+  push:
+  # workflow_dispatch: # Keep manual trigger
+  #   inputs:
+  #     version:
+  #       description: 'Version number (e.g. 0.0.63.dev20250111)'
+  #       required: true
+  #       type: string
+  # schedule:
+  #   - cron: "0 0 * * *" # Run every day at midnight
 
 jobs:
   trigger-client-and-models-build:
@@ -201,43 +202,49 @@ jobs:
     runs-on: ubuntu-latest
     env:
       TOGETHER_API_KEY: ${{ secrets.TOGETHER_API_KEY }}
       FIREWORKS_API_KEY: ${{ secrets.FIREWORKS_API_KEY }}
       TAVILY_SEARCH_API_KEY: ${{ secrets.TAVILY_SEARCH_API_KEY }}
     steps:
-      - uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-      - name: Install the package
-        run: |
-          max_attempts=6
-          attempt=1
-          while [ $attempt -le $max_attempts ]; do
-            echo "Attempt $attempt of $max_attempts to install package..."
-            if pip install --no-cache --index-url https://pypi.org/simple/ --extra-index-url https://test.pypi.org/simple/ llama-stack==${{ needs.trigger-client-and-models-build.outputs.version }}; then
-              echo "Package installed successfully"
-              break
-            fi
-            if [ $attempt -ge $max_attempts ]; then
-              echo "Failed to install package after $max_attempts attempts"
-              exit 1
-            fi
-            attempt=$((attempt + 1))
-            sleep 10
-          done
-      - name: Test the package versions
+      # - uses: actions/checkout@v4
+      #   with:
+      #     persist-credentials: false
+      # - name: Install the package
+      #   run: |
+      #     max_attempts=6
+      #     attempt=1
+      #     while [ $attempt -le $max_attempts ]; do
+      #       echo "Attempt $attempt of $max_attempts to install package..."
+      #       if pip install --no-cache --index-url https://pypi.org/simple/ --extra-index-url https://test.pypi.org/simple/ llama-stack==${{ needs.trigger-client-and-models-build.outputs.version }}; then
+      #         echo "Package installed successfully"
+      #         break
+      #       fi
+      #       if [ $attempt -ge $max_attempts ]; then
+      #         echo "Failed to install package after $max_attempts attempts"
+      #         exit 1
+      #       fi
+      #       attempt=$((attempt + 1))
+      #       sleep 10
+      #     done
+      # - name: Test the package versions
+      #   run: |
+      #     pip list | grep llama_
+      # - name: Test CLI commands
+      #   run: |
+      #     llama model list
+      #     llama stack build --list-templates
+      #     llama model prompt-format -m Llama3.2-11B-Vision-Instruct
+      #     llama stack list-apis
+      #     llama stack list-providers inference
+      #     llama stack list-providers telemetry
+      # - name: Test Notebook
+      #   run: |
+      #     pip install pytest nbval
+      #     llama stack build --template together --image-type venv
+      #     pytest -v -s --nbval-lax ./docs/notebooks/Llama_Stack_Building_AI_Applications.ipynb
+      - name: Integration tests
+        working-directory: "${{ github.workspace }}"
         run: |
-          pip list | grep llama_
-      - name: Test CLI commands
-        run: |
-          llama model list
-          llama stack build --list-templates
-          llama model prompt-format -m Llama3.2-11B-Vision-Instruct
-          llama stack list-apis
-          llama stack list-providers inference
-          llama stack list-providers telemetry
-      - name: Test Notebook
-        run: |
-          pip install pytest nbval
-          llama stack build --template together --image-type venv
-          pytest -v -s --nbval-lax ./docs/notebooks/Llama_Stack_Building_AI_Applications.ipynb
-      # TODO: add trigger for integration test workflow & docker builds
+          pip install pytest_html
+          export FIREWORKS_API_KEY=${{ secrets.FIREWORKS_API_KEY }}
+          export TOGETHER_API_KEY=${{ secrets.TOGETHER_API_KEY }}
+          pytest ${{ github.workspace }}/llama_stack/providers/tests/ -k "fireworks and together" --config=github_ci_test_config.yaml
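The new "Integration tests" step installs pytest_html, but the visible command does not pass a report flag, so any HTML report wiring presumably lives elsewhere in the test suite's configuration. For reference, a standalone HTML report could be produced with pytest-html's standard flags; a sketch, not necessarily what this workflow does:

pytest llama_stack/providers/tests/ \
  -k "fireworks and together" \
  --config=github_ci_test_config.yaml \
  --html=pytest_report.html --self-contained-html   # pytest-html flags; assumed usage, not confirmed by this commit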
19 changes: 19 additions & 0 deletions llama_stack/providers/tests/github_ci_test_config.yaml
@@ -0,0 +1,19 @@
inference:
  tests:
    - inference/test_vision_inference.py::test_vision_chat_completion_streaming
    - inference/test_vision_inference.py::test_vision_chat_completion_non_streaming
    - inference/test_text_inference.py::test_structured_output
    - inference/test_text_inference.py::test_completion
    - inference/test_text_inference.py::test_chat_completion_streaming
    - inference/test_text_inference.py::test_chat_completion_non_streaming
    - inference/test_text_inference.py::test_chat_completion_with_tool_calling
    - inference/test_text_inference.py::test_chat_completion_with_tool_calling_streaming

  fixtures:
    provider_fixtures:
      - default_fixture_param_id: fireworks
      - inference: together

    inference_models:
      - meta-llama/Llama-3.1-8B-Instruct
      - meta-llama/Llama-3.2-11B-Vision-Instruct
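This config drives which tests, provider fixtures, and models the CI pytest command selects. To inspect the resulting parametrized test IDs without calling any provider APIs, pytest's collection-only mode can be used; a sketch, assuming the suite honours the same --config flag used in the workflows above:

pytest llama_stack/providers/tests/ \
  -k "fireworks and together" \
  --config=github_ci_test_config.yaml \
  --collect-only -q   # list the selected test IDs only; no tests are executed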
