
Commit e1b0418

Merge branch 'intel:main' into fix-fine-tuning-bugs
harborn authored Jul 17, 2024
2 parents 3ff4f88 + df24aa2 commit e1b0418
Showing 11 changed files with 36 additions and 32 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/workflow_finetune.yml
@@ -11,10 +11,10 @@ on:
default: '10.1.2.13:5000/llmray-build'
http_proxy:
type: string
- default: 'http://10.24.221.169:912'
+ default: 'http://proxy-prc.intel.com:912'
https_proxy:
type: string
- default: 'http://10.24.221.169:912'
+ default: 'http://proxy-prc.intel.com:912'
runner_config_path:
type: string
default: '/home/ci/llm-ray-actions-runner'
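The proxy changes in this and the following workflow files only swap the hard-coded 10.24.221.169 address for the proxy-prc.intel.com hostname; the inputs themselves are consumed unchanged. As a quick sanity check of the new default, a proxied request from a runner could look like the sketch below (assuming curl is installed and the runner can reach the proxy; the target URL is only an example):

    # Confirm the new proxy hostname resolves and forwards HTTPS traffic.
    curl --proxy http://proxy-prc.intel.com:912 -sSf -o /dev/null https://huggingface.co \
      && echo "proxy OK" || echo "proxy unreachable"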
4 changes: 2 additions & 2 deletions .github/workflows/workflow_finetune_gpu.yml
@@ -8,10 +8,10 @@ on:
default: '10.1.2.13:5000/llmray-build'
http_proxy:
type: string
- default: 'http://10.24.221.169:912'
+ default: 'http://proxy-prc.intel.com:912'
https_proxy:
type: string
- default: 'http://10.24.221.169:912'
+ default: 'http://proxy-prc.intel.com:912'

jobs:
finetune-gpu:
4 changes: 2 additions & 2 deletions .github/workflows/workflow_inference.yml
@@ -11,10 +11,10 @@ on:
default: '10.1.2.13:5000/llmray-build'
http_proxy:
type: string
- default: 'http://10.24.221.169:912'
+ default: 'http://proxy-prc.intel.com:912'
https_proxy:
type: string
- default: 'http://10.24.221.169:912'
+ default: 'http://proxy-prc.intel.com:912'
runner_config_path:
type: string
default: '/home/ci/llm-ray-actions-runner'
6 changes: 3 additions & 3 deletions .github/workflows/workflow_orders_nightly.yml
@@ -1,8 +1,8 @@
name: Nightly

- on:
- schedule:
- - cron: "0 16 * * *"
+ on: []
+ # schedule:
+ # - cron: "0 16 * * *"

jobs:

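Note: the disabled "0 16 * * *" trigger is a standard cron expression (minute, hour, day of month, month, day of week), i.e. once a day at 16:00 UTC; with on: [] the workflow has no triggers and will not run until the schedule block is uncommented.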
24 changes: 12 additions & 12 deletions .github/workflows/workflow_orders_on_merge.yml
@@ -16,18 +16,18 @@ jobs:
needs: Lint
uses: ./.github/workflows/workflow_tests.yml

- Inference:
- needs: Lint
- uses: ./.github/workflows/workflow_inference.yml
+ # Inference:
+ # needs: Lint
+ # uses: ./.github/workflows/workflow_inference.yml

- Inference_Gaudi:
- needs: Lint
- uses: ./.github/workflows/workflow_inference_gaudi2.yml
+ # Inference_Gaudi:
+ # needs: Lint
+ # uses: ./.github/workflows/workflow_inference_gaudi2.yml

- Finetune:
- needs: Lint
- uses: ./.github/workflows/workflow_finetune.yml
+ # Finetune:
+ # needs: Lint
+ # uses: ./.github/workflows/workflow_finetune.yml

- Benchmark:
- needs: Lint
- uses: ./.github/workflows/workflow_test_benchmark.yml
+ # Benchmark:
+ # needs: Lint
+ # uses: ./.github/workflows/workflow_test_benchmark.yml
18 changes: 9 additions & 9 deletions .github/workflows/workflow_orders_on_pr.yml
@@ -16,14 +16,14 @@ jobs:
needs: Lint
uses: ./.github/workflows/workflow_tests.yml

- Inference:
- needs: Lint
- uses: ./.github/workflows/workflow_inference.yml
+ # Inference:
+ # needs: Lint
+ # uses: ./.github/workflows/workflow_inference.yml

- Inference_Gaudi:
- needs: Lint
- uses: ./.github/workflows/workflow_inference_gaudi2.yml
+ # Inference_Gaudi:
+ # needs: Lint
+ # uses: ./.github/workflows/workflow_inference_gaudi2.yml

- Finetune:
- needs: Lint
- uses: ./.github/workflows/workflow_finetune.yml
+ # Finetune:
+ # needs: Lint
+ # uses: ./.github/workflows/workflow_finetune.yml
1 change: 1 addition & 0 deletions dev/docker/Dockerfile.cpu_and_deepspeed
@@ -2,6 +2,7 @@
FROM ubuntu:22.04

ENV LANG C.UTF-8
+ ENV HF_ENDPOINT "https://hf-mirror.com"

WORKDIR /root/llm-on-ray

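The same HF_ENDPOINT line is added to each Dockerfile in this commit. HF_ENDPOINT is the environment variable huggingface_hub reads for the Hub base URL, so baking it into the image points model and dataset downloads at the hf-mirror.com mirror. A minimal sketch of the effect inside such a container (assuming huggingface_hub and its huggingface-cli entry point are installed; gpt2/config.json is only an illustrative download):

    # With HF_ENDPOINT baked into the image, downloads go through the mirror:
    huggingface-cli download gpt2 config.json

    # Equivalent one-off override on a host without the image:
    HF_ENDPOINT=https://hf-mirror.com huggingface-cli download gpt2 config.json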
1 change: 1 addition & 0 deletions dev/docker/Dockerfile.cpu_and_deepspeed.pip_non_editable
@@ -2,6 +2,7 @@
FROM ubuntu:22.04

ENV LANG C.UTF-8
+ ENV HF_ENDPOINT "https://hf-mirror.com"

WORKDIR /root/llm-on-ray

1 change: 1 addition & 0 deletions dev/docker/Dockerfile.ipex-llm
@@ -2,6 +2,7 @@
FROM ubuntu:22.04

ENV LANG C.UTF-8
+ ENV HF_ENDPOINT "https://hf-mirror.com"

WORKDIR /root/llm-on-ray

1 change: 1 addition & 0 deletions dev/docker/Dockerfile.vllm
@@ -2,6 +2,7 @@
FROM ubuntu:22.04

ENV LANG C.UTF-8
+ ENV HF_ENDPOINT "https://hf-mirror.com"

WORKDIR /root/llm-on-ray

4 changes: 2 additions & 2 deletions dev/scripts/ci-functions.sh
@@ -1,8 +1,8 @@
#!/usr/bin/env bash
set -eo pipefail

- HTTP_PROXY='http://10.24.221.169:912'
- HTTPS_PROXY='http://10.24.221.169:912'
+ HTTP_PROXY='http://proxy-prc.intel.com:912'
+ HTTPS_PROXY='http://proxy-prc.intel.com:912'
MODEL_CACHE_PATH_LOACL='/root/.cache/huggingface/hub'
CODE_CHECKOUT_PATH_LOCAL='/root/llm-on-ray'

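HTTP_PROXY and HTTPS_PROXY at the top of ci-functions.sh presumably feed the docker-based CI helpers defined later in the script. A hypothetical helper in the same style, shown only to illustrate how the updated values would propagate (build_image_with_proxy, the image tag, and the Dockerfile choice are illustrative, not part of this commit):

    # Hypothetical helper: forward the CI proxy settings into an image build.
    build_image_with_proxy() {
        docker build \
            --build-arg http_proxy="${HTTP_PROXY}" \
            --build-arg https_proxy="${HTTPS_PROXY}" \
            -f dev/docker/Dockerfile.cpu_and_deepspeed \
            -t llm-on-ray:ci .
    }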
