# maestro-tests.yml
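# CI workflow: runs the project's pytest suite on every pull request into
# main or develop, across ubuntu-latest/macos-latest and Python 3.9-3.12.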
name: Maestro PyTest WorkFlow 🧪

on:
  pull_request:
    branches: [main, develop]
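# Fan out one job per (os, python-version) pair: 2 runners x 4 Python
# versions = 8 jobs, each running the full test suite.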
jobs:
  build-dev-test:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest]
        python-version: ["3.9", "3.10", "3.11", "3.12"]
    steps:
      - name: 🛎️ Checkout
        uses: actions/checkout@v4

      - name: 🐍 Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
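      # flash-attention is installed from prebuilt wheels that exist only for
      # Linux; each URL pins v2.6.3 built against CUDA 12.3 / torch 2.4, and
      # the cpXY tag in the wheel filename must match the matrix Python
      # version. macOS jobs skip this block entirely.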
      - name: 📦 Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install torch
          if [[ "${{ matrix.os }}" == "ubuntu-latest" ]]; then
            if [[ "${{ matrix.python-version }}" == "3.9" ]]; then
              pip install https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.4cxx11abiTRUE-cp39-cp39-linux_x86_64.whl
            elif [[ "${{ matrix.python-version }}" == "3.10" ]]; then
              pip install https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.4cxx11abiTRUE-cp310-cp310-linux_x86_64.whl
            elif [[ "${{ matrix.python-version }}" == "3.11" ]]; then
              pip install https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.4cxx11abiTRUE-cp311-cp311-linux_x86_64.whl
            elif [[ "${{ matrix.python-version }}" == "3.12" ]]; then
              pip install https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.4cxx11abiTRUE-cp312-cp312-linux_x86_64.whl
            fi
          fi
          pip install .
          pip install pytest
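      # The package itself is installed above (pip install .), so pytest
      # collects the modules under ./test against the installed build.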
      - name: 🧪 Run Tests
        run: python -m pytest ./test