
Commit

chore: add ci
appflowy committed Jul 9, 2024
1 parent 6b51c89 commit c8e4caf
Showing 1 changed file with 183 additions and 0 deletions.
183 changes: 183 additions & 0 deletions .github/workflows/integration_test.yaml
@@ -0,0 +1,183 @@
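# Integration-test workflow: fetch the latest prebuilt AppFlowy AI plugin binaries
# from S3, download small GGUF models from Hugging Face, wire both into a .env
# file, and run the `ci_` cargo tests on macOS, Windows, and Linux.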
name: Build, Sign, and Run Tests

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

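# A single test job, fanned out across the OS matrix below (debug builds only).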
jobs:
  run_test:
    needs: build
    runs-on: ${{ matrix.os }}

    strategy:
      matrix:
        os: [
          macos-latest,
          windows-latest,
          ubuntu-latest
        ]
        build_type: [ debug ]

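    # AppFlowy-AI is checked out so the next step can read its latest git tag.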
    steps:
      - name: Checkout AppFlowy AI
        uses: actions/checkout@v4
        with:
          repository: AppFlowy-IO/AppFlowy-AI
          path: AppFlowy-AI

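      # Resolve the most recent tag in AppFlowy-AI; it selects which S3 artifacts
      # to download below. (Uses the legacy `set-output` workflow command.)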
      - name: Get latest tag
        id: get-latest-tag
        working-directory: AppFlowy-AI
        run: |
          echo "##[set-output name=tag;]$(git describe --tags `git rev-list --tags --max-count=1`)"
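      # Credentials for the appflowy-local-ai S3 bucket come from GitHub secrets.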
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.LOCAL_AI_AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.LOCAL_AI_AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ secrets.LOCAL_AI_AWS_REGION }}

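      # Pull the per-OS plugin archive for that tag and unpack it into ./AppFlowyLLM.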
      - name: Download artifact from S3 (Unix)
        if: ${{ matrix.os != 'windows-latest' }}
        run: |
          aws s3 cp s3://appflowy-local-ai/${{ steps.get-latest-tag.outputs.tag }}/${{ matrix.os }}_${{ matrix.build_type }}.zip AppFlowyLLM_${{ matrix.os }}_${{ matrix.build_type }}.zip
          unzip AppFlowyLLM_${{ matrix.os }}_${{ matrix.build_type }}.zip -d AppFlowyLLM
          cd AppFlowyLLM
          ls
        shell: bash

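      # Same download on Windows, using Expand-Archive instead of unzip.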
      - name: Download artifact from S3 (Windows)
        if: ${{ matrix.os == 'windows-latest' }}
        run: |
          aws s3 cp s3://appflowy-local-ai/${{ steps.get-latest-tag.outputs.tag }}/${{ matrix.os }}_${{ matrix.build_type }}.zip AppFlowyLLM_${{ matrix.os }}_${{ matrix.build_type }}.zip
          Expand-Archive -Path AppFlowyLLM_${{ matrix.os }}_${{ matrix.build_type }}.zip -DestinationPath AppFlowyLLM
          cd AppFlowyLLM
          dir
        shell: powershell

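      # Test models: TinyLlama (chat) and all-MiniLM-L12-v2 (embeddings), both in GGUF format.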
      - name: Download models (Unix)
        if: ${{ matrix.os != 'windows-latest' }}
        run: |
          curl -sSL -o tinyllama.gguf "https://huggingface.co/TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF/resolve/main/tinyllama-1.1b-chat-v1.0.Q4_0.gguf?download=true"
          curl -sSL -o all-MiniLM-L12-v2.F16.gguf "https://huggingface.co/leliuga/all-MiniLM-L12-v2-GGUF/resolve/main/all-MiniLM-L12-v2.F16.gguf?download=true"
          ls
      - name: Download models (Windows)
        if: ${{ matrix.os == 'windows-latest' }}
        run: |
          Invoke-WebRequest -Uri "https://huggingface.co/TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF/resolve/main/tinyllama-1.1b-chat-v1.0.Q4_0.gguf?download=true" -OutFile tinyllama.gguf
          Invoke-WebRequest -Uri "https://huggingface.co/leliuga/all-MiniLM-L12-v2-GGUF/resolve/main/all-MiniLM-L12-v2.F16.gguf?download=true" -OutFile all-MiniLM-L12-v2.F16.gguf
          dir
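      # Check out the repository that owns this workflow.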
      - name: Checkout code
        uses: actions/checkout@v3

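      # Rewrite dev.env into .env so the tests can find the downloaded binaries and
      # models; macOS uses BSD sed, hence the empty '' suffix after -i.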
      - name: Prepare env (macOS)
        if: ${{ matrix.os == 'macos-latest' }}
        run: |
          ABS_PATH=$(pwd)
          chat_bin_path=$ABS_PATH/AppFlowyLLM/chat_plugin
          embedding_bin_path=$ABS_PATH/AppFlowyLLM/embedding_plugin
          cd AppFlowy-LocalAI
          cp dev.env .env
          sed -i '' 's|RUST_LOG=.*|RUST_LOG=trace|' .env
          # binary
          sed -i '' "s|CHAT_BIN_PATH=.*|CHAT_BIN_PATH=$chat_bin_path|" .env
          sed -i '' "s|EMBEDDING_BIN_PATH=.*|EMBEDDING_BIN_PATH=$embedding_bin_path|" .env
          # model
          sed -i '' "s|LOCAL_AI_MODEL_DIR=.*|LOCAL_AI_MODEL_DIR=$ABS_PATH|" .env
          sed -i '' 's|LOCAL_AI_CHAT_MODEL_NAME=.*|LOCAL_AI_CHAT_MODEL_NAME=tinyllama.gguf|' .env
          sed -i '' 's|LOCAL_AI_EMBEDDING_MODEL_NAME=.*|LOCAL_AI_EMBEDDING_MODEL_NAME=all-MiniLM-L12-v2.F16.gguf|' .env
          cat .env
        shell: bash

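      # Same .env rewrite with GNU sed (no empty suffix after -i).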
      - name: Prepare env (Linux)
        if: ${{ matrix.os == 'ubuntu-latest' }}
        run: |
          ABS_PATH=$(pwd)
          chat_bin_path=$ABS_PATH/AppFlowyLLM/chat_plugin
          embedding_bin_path=$ABS_PATH/AppFlowyLLM/embedding_plugin
          cd AppFlowy-LocalAI
          cp dev.env .env
          sed -i 's|RUST_LOG=.*|RUST_LOG=trace|' .env
          # binary
          sed -i "s|CHAT_BIN_PATH=.*|CHAT_BIN_PATH=$chat_bin_path|" .env
          sed -i "s|EMBEDDING_BIN_PATH=.*|EMBEDDING_BIN_PATH=$embedding_bin_path|" .env
          # model
          sed -i "s|LOCAL_AI_MODEL_DIR=.*|LOCAL_AI_MODEL_DIR=$ABS_PATH|" .env
          sed -i 's|LOCAL_AI_CHAT_MODEL_NAME=.*|LOCAL_AI_CHAT_MODEL_NAME=tinyllama.gguf|' .env
          sed -i 's|LOCAL_AI_EMBEDDING_MODEL_NAME=.*|LOCAL_AI_EMBEDDING_MODEL_NAME=all-MiniLM-L12-v2.F16.gguf|' .env
          cat .env
        shell: bash

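      # Windows variant: rewrite .env via PowerShell string replacement and point at
      # the .exe plugin binaries.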
      - name: Prepare env (Windows)
        if: ${{ matrix.os == 'windows-latest' }}
        run: |
          $ErrorActionPreference = 'Stop'
          try {
            $ABS_PATH = $PWD.Path
            $chat_bin_path = Join-Path $ABS_PATH 'AppFlowyLLM' 'chat_plugin.exe'
            $embedding_bin_path = Join-Path $ABS_PATH 'AppFlowyLLM' 'embedding_plugin.exe'
            Set-Location -Path 'AppFlowy-LocalAI'
            Copy-Item -Path 'dev.env' -Destination '.env' -Force
            $envContent = Get-Content '.env'
            $envContent = $envContent -replace 'RUST_LOG=.*', 'RUST_LOG=trace'
            $envContent = $envContent -replace 'CHAT_BIN_PATH=.*', "CHAT_BIN_PATH=$chat_bin_path"
            $envContent = $envContent -replace 'EMBEDDING_BIN_PATH=.*', "EMBEDDING_BIN_PATH=$embedding_bin_path"
            $envContent = $envContent -replace 'LOCAL_AI_MODEL_DIR=.*', "LOCAL_AI_MODEL_DIR=$ABS_PATH"
            $envContent = $envContent -replace 'LOCAL_AI_CHAT_MODEL_NAME=.*', 'LOCAL_AI_CHAT_MODEL_NAME=tinyllama.gguf'
            $envContent = $envContent -replace 'LOCAL_AI_EMBEDDING_MODEL_NAME=.*', 'LOCAL_AI_EMBEDDING_MODEL_NAME=all-MiniLM-L12-v2.F16.gguf'
            $envContent | Set-Content '.env'
            Get-Content '.env'
            Write-Host "Environment setup completed successfully."
          }
          catch {
            Write-Host "An error occurred during environment setup: $_"
            exit 1
          }
        shell: pwsh

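      # Pin the Rust toolchain used to build and run the tests.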
      - name: Install Rust toolchain
        uses: actions-rs/toolchain@v1
        with:
          toolchain: 1.77.2
          override: true
          profile: minimal

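      # Run the tests whose names match `ci_` on macOS and Linux; the .env prepared
      # above is presumably loaded by the tests themselves at startup.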
      - name: Run tests
        if: ${{ matrix.os != 'windows-latest' }}
        working-directory: AppFlowy-LocalAI
        run: cargo test ci_
        shell: bash

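      # On Windows the .env values are exported into GITHUB_ENV first, so the test
      # process receives them as ordinary environment variables.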
      - name: Load .env file
        if: ${{ matrix.os == 'windows-latest' }}
        run: |
          Get-Content .env | ForEach-Object {
            if ($_ -match '^([^=]+)=(.*)$') {
              $name = $matches[1]
              $value = $matches[2]
              Write-Output "$name=$value" >> $env:GITHUB_ENV
            }
          }
        shell: powershell
        working-directory: AppFlowy-LocalAI

      - name: Run tests (Windows)
        if: ${{ matrix.os == 'windows-latest' }}
        working-directory: AppFlowy-LocalAI
        run: cargo test ci_
        shell: powershell
