-
Notifications
You must be signed in to change notification settings - Fork 752
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
1 parent
0fefd43
commit 706e3d5
Showing
3 changed files
with
150 additions
and
45 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,80 @@ | ||
# CI workflow: builds the package, then runs the fireworks/together
# provider integration tests with pytest.
name: Integration-test

on:
  # Manual trigger only for now; enable push when the suite is stable.
  workflow_dispatch:
  # push:

run-name: "CI test"

env:
  # Root of the provider test suite; reused by the test step below.
  TESTS_PATH: "${{ github.workspace }}/llama_stack/providers/tests"

jobs:
  build:
    name: Build
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.x'

  run_tests:
    name: Run tests
    runs-on: ubuntu-latest
    needs: build
    steps:
      - name: "Checkout 'meta-llama/llama-stack' repository"
        uses: actions/checkout@v3

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.x'

      # NOTE: pytest-asyncio was previously listed twice
      # (as `pytest_asyncio` and `pytest-asyncio` — the same PyPI
      # project); it is installed exactly once here.
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
          pip install -e .
          pip install -U \
            torch torchvision \
            pytest pytest-asyncio \
            fairscale lm-format-enforcer \
            zmq chardet pypdf \
            pandas sentence_transformers together \
            aiosqlite groq fireworks-ai

      # - name: Install Ollama
      #   run: |
      #     curl -fsSL https://ollama.com/install.sh | sh

      # - name: spin up ollama server
      #   run: |
      #     ollama run llama3.1:8b-instruct-fp16
      #     sleep 10

      - name: Run integration test
        working-directory: "${{ github.workspace }}"
        # Pass secrets through the step env mapping instead of
        # `export FOO=${{ secrets... }}` inside the script: expression
        # interpolation into shell text is injection-prone and the env
        # mapping is the supported mechanism for secret exposure.
        env:
          FIREWORKS_API_KEY: ${{ secrets.FIREWORKS_API_KEY }}
          TOGETHER_API_KEY: ${{ secrets.TOGETHER_API_KEY }}
        run: |
          echo "Current directory: $(pwd)"
          echo "Repository root: ${{ github.workspace }}"
          echo "Branch: ${{ github.ref }}"
          echo "List of tests"
          pytest "$TESTS_PATH" -k "fireworks and together" --config=github_ci_test_config.yaml

      # - name: Upload pytest test results
      #   uses: actions/upload-artifact@v4
      #   with:
      #     name: pytest_report.md
      #     path: pytest_report.md
      #   # Use always() to always run this step to publish test results when there are test failures
      #   if: ${{ always() }}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,19 @@ | ||
# Pytest configuration consumed via `--config=github_ci_test_config.yaml`:
# selects the provider test cases, provider fixtures, and models to run in CI.
# NOTE(review): nesting reconstructed from a whitespace-mangled paste —
# confirm against the llama-stack test-config schema.
inference:
  # Subset of inference provider tests exercised in CI.
  tests:
    - inference/test_vision_inference.py::test_vision_chat_completion_streaming
    - inference/test_vision_inference.py::test_vision_chat_completion_non_streaming
    - inference/test_text_inference.py::test_structured_output
    - inference/test_text_inference.py::test_completion
    - inference/test_text_inference.py::test_chat_completion_streaming
    - inference/test_text_inference.py::test_chat_completion_non_streaming
    - inference/test_text_inference.py::test_chat_completion_with_tool_calling
    - inference/test_text_inference.py::test_chat_completion_with_tool_calling_streaming

  fixtures:
    # Provider backends the suite is parameterized over; fireworks is the
    # default fixture parameter, together is an additional inference provider.
    provider_fixtures:
      - default_fixture_param_id: fireworks
      - inference: together

    # Models the inference tests are run against.
    inference_models:
      - meta-llama/Llama-3.1-8B-Instruct
      - meta-llama/Llama-3.2-11B-Vision-Instruct