formatting: 🧹 -> 📦 project formatting,ruff,bandit,mypy configurations are added #21
Workflow file for this run

name: Test WorkFlow
on:
  pull_request:
    branches: [main, develop]
jobs:
  build-dev-test:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest]
        python-version: ["3.9", "3.10", "3.11", "3.12"]
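    # The matrix fans out to 8 jobs per pull request: 2 operating systems × 4 Python versions.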
    steps:
      - name: 🛎️ Checkout
        uses: actions/checkout@v4
      - name: 🐍 Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
      - name: 📦 Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install torch
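          # The prebuilt flash-attn wheels below are linux_x86_64 builds, so only the
          # ubuntu-latest jobs install them; each wheel's cpXY tag must match the matrix
          # Python version. The 3.8 branch is currently unreachable because 3.8 is not
          # in the matrix above.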
if [[ "${{ matrix.os }}" == "ubuntu-latest" ]]; then
if [[ "${{ matrix.python-version }}" == "3.10" ]]; then
pip install https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.4cxx11abiTRUE-cp310-cp310-linux_x86_64.whl
elif [[ "${{ matrix.python-version }}" == "3.11" ]]; then
pip install https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.4cxx11abiTRUE-cp311-cp311-linux_x86_64.whl
elif [[ "${{ matrix.python-version }}" == "3.12" ]]; then
pip install https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.4cxx11abiTRUE-cp312-cp312-linux_x86_64.whl
elif [[ "${{ matrix.python-version }}" == "3.8" ]]; then
pip install https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.4cxx11abiTRUE-cp38-cp38-linux_x86_64.whl
elif [[ "${{ matrix.python-version }}" == "3.9" ]]; then
pip install https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.4cxx11abiTRUE-cp39-cp39-linux_x86_64.whl
fi
fi
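          # Install the package under test, plus pytest for the test step below.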
          pip install .
          pip install pytest
      - name: 🧪 Test
        run: "python -m pytest ./test"