Skip to content

Commit 5c9de49

Browse files
kevjumba authored and adchia committed
chore: Add documentation for maintainer development (feast-dev#3025)
* Maintainer test docs Signed-off-by: Kevin Zhang <[email protected]> * Add links from contributing and development guide Signed-off-by: Kevin Zhang <[email protected]>
1 parent 9a64e77 commit 5c9de49

18 files changed

+526
-65
lines changed
Lines changed: 159 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,159 @@
1+
# Fork-friendly AWS integration tests: builds the feast python feature-server
# image, pushes it to the fork's ECR, then runs the AWS-flavored integration
# test suite against it. Intended to be enabled on forks of feast-dev/feast.
name: fork-pr-integration-tests-aws

on: [pull_request]

jobs:
  build-docker-image:
    # Guard so this only runs on the fork that configured the AWS secrets.
    if: github.repository == 'your github repo' # swap here with your project id
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          # pull_request_target runs the workflow in the context of the base repo
          # as such actions/checkout needs to be explicit configured to retrieve
          # code from the PR.
          ref: refs/pull/${{ github.event.pull_request.number }}/merge
          submodules: recursive
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
        with:
          install: true
      - name: Set up AWS SDK
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: us-west-2
      - name: Login to Amazon ECR
        id: login-ecr
        uses: aws-actions/amazon-ecr-login@v1
      - name: Set ECR image tag
        id: image-tag
        # `::set-output` is deprecated; write step outputs to $GITHUB_OUTPUT.
        run: echo "DOCKER_IMAGE_TAG=$(git rev-parse HEAD)" >> "$GITHUB_OUTPUT"
      - name: Cache Public ECR Image
        id: lambda_python_3_9
        uses: actions/cache@v2
        with:
          path: ~/cache
          key: lambda_python_3_9
      - name: Handle Cache Miss (pull public ECR image & save it to tar file)
        # Fix: the original checked `steps.cache-primes.outputs.cache-hit`,
        # but no step with id `cache-primes` exists — the cache step's id is
        # `lambda_python_3_9`, so the hit branch below could never fire.
        if: steps.lambda_python_3_9.outputs.cache-hit != 'true'
        run: |
          mkdir -p ~/cache
          docker pull public.ecr.aws/lambda/python:3.9
          docker save public.ecr.aws/lambda/python:3.9 -o ~/cache/lambda_python_3_9.tar
      - name: Handle Cache Hit (load docker image from tar file)
        if: steps.lambda_python_3_9.outputs.cache-hit == 'true'
        run: |
          docker load -i ~/cache/lambda_python_3_9.tar
      - name: Build and push
        env:
          ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
          ECR_REPOSITORY: feast-python-server
        run: |
          docker build \
            --file sdk/python/feast/infra/feature_servers/aws_lambda/Dockerfile \
            --tag $ECR_REGISTRY/$ECR_REPOSITORY:${{ steps.image-tag.outputs.DOCKER_IMAGE_TAG }} \
            --load \
            .
          docker push $ECR_REGISTRY/$ECR_REPOSITORY:${{ steps.image-tag.outputs.DOCKER_IMAGE_TAG }}
    outputs:
      DOCKER_IMAGE_TAG: ${{ steps.image-tag.outputs.DOCKER_IMAGE_TAG }}

  integration-test-python:
    if: github.repository == 'your github repo' # swap here with your project id
    # Fix: this job reads ${{ needs.build-docker-image.outputs.DOCKER_IMAGE_TAG }}
    # below, so it must declare the dependency — otherwise the `needs` context
    # is empty and the two jobs run in parallel against a missing image.
    needs: build-docker-image
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: [ "3.8" ]
        os: [ ubuntu-latest ]
    env:
      OS: ${{ matrix.os }}
      PYTHON: ${{ matrix.python-version }}
    services:
      redis:
        image: redis
        ports:
          # Quoted so the host:container mapping is always read as a string.
          - "6379:6379"
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      # v3 for consistency with the build-docker-image job above (was @v2).
      - uses: actions/checkout@v3
        with:
          # pull_request_target runs the workflow in the context of the base repo
          # as such actions/checkout needs to be explicit configured to retrieve
          # code from the PR.
          ref: refs/pull/${{ github.event.pull_request.number }}/merge
          submodules: recursive
      - name: Setup Python
        uses: actions/setup-python@v2
        id: setup-python
        with:
          python-version: ${{ matrix.python-version }}
          architecture: x64
      - name: Setup Go
        id: setup-go
        uses: actions/setup-go@v2
        with:
          # Quoted so YAML never retypes a version that looks like a number.
          go-version: "1.18.0"
      - name: Set up AWS SDK
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: us-west-2
      - name: Use AWS CLI
        # Smoke-check that the provided AWS credentials actually work.
        run: aws sts get-caller-identity
      - name: Upgrade pip version
        run: |
          pip install --upgrade "pip>=21.3.1,<22.1"
      - name: Get pip cache dir
        id: pip-cache
        # `::set-output` is deprecated; write step outputs to $GITHUB_OUTPUT.
        run: |
          echo "dir=$(pip cache dir)" >> "$GITHUB_OUTPUT"
      - name: pip cache
        uses: actions/cache@v2
        with:
          path: |
            ${{ steps.pip-cache.outputs.dir }}
            /opt/hostedtoolcache/Python
            /Users/runner/hostedtoolcache/Python
          key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }}
          restore-keys: |
            ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-
      - name: Install pip-tools
        run: pip install pip-tools
      - name: Install apache-arrow on ubuntu
        if: matrix.os == 'ubuntu-latest'
        run: |
          sudo apt update
          sudo apt install -y -V ca-certificates lsb-release wget
          wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
          sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
          sudo apt update
          sudo apt install -y -V libarrow-dev
      - name: Install apache-arrow on macos
        if: matrix.os == 'macOS-latest'
        run: brew install apache-arrow
      - name: Install dependencies
        run: make install-python-ci-dependencies
      - name: Setup Redis Cluster
        run: |
          docker pull vishnunair/docker-redis-cluster:latest
          docker run -d -p 6001:6379 -p 6002:6380 -p 6003:6381 -p 6004:6382 -p 6005:6383 -p 6006:6384 --name redis-cluster vishnunair/docker-redis-cluster
      - name: Test python
        if: ${{ always() }} # this will guarantee that step won't be canceled and resources won't leak
        env:
          FEAST_SERVER_DOCKER_IMAGE_TAG: ${{ needs.build-docker-image.outputs.DOCKER_IMAGE_TAG }}
        run: |
          pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "aws and not Snowflake and not BigQuery"
          pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "File and not Snowflake and not BigQuery"
          pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "dynamo and not Snowflake and not BigQuery"
          pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "Redshift and not Snowflake and not BigQuery"
158+
159+
Lines changed: 97 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,97 @@
1+
# Fork-friendly GCP integration tests: runs the BigQuery and File flavors of
# the feast Python integration test suite using the fork's GCP credentials.
name: fork-pr-integration-tests-gcp

on: [pull_request]

jobs:
  integration-test-python:
    # Guard so this only runs on the fork that configured the GCP secrets.
    if: github.repository == 'your github repo' # swap here with your project id
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: [ "3.8" ]
        os: [ ubuntu-latest ]
    env:
      OS: ${{ matrix.os }}
      PYTHON: ${{ matrix.python-version }}
    services:
      redis:
        image: redis
        ports:
          # Quoted so the host:container mapping is always read as a string.
          - "6379:6379"
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - uses: actions/checkout@v2
        with:
          # pull_request_target runs the workflow in the context of the base repo
          # as such actions/checkout needs to be explicit configured to retrieve
          # code from the PR.
          ref: refs/pull/${{ github.event.pull_request.number }}/merge
          submodules: recursive
      - name: Setup Python
        uses: actions/setup-python@v2
        id: setup-python
        with:
          python-version: ${{ matrix.python-version }}
          architecture: x64
      - name: Setup Go
        id: setup-go
        uses: actions/setup-go@v2
        with:
          # Quoted so YAML never retypes a version that looks like a number.
          go-version: "1.18.0"
      - name: Set up gcloud SDK
        uses: google-github-actions/setup-gcloud@v0
        with:
          project_id: ${{ secrets.GCP_PROJECT_ID }}
          service_account_key: ${{ secrets.GCP_SA_KEY }}
          export_default_credentials: true
      - name: Use gcloud CLI
        # Smoke-check that the provided GCP credentials actually work.
        run: gcloud info
      - name: Upgrade pip version
        run: |
          pip install --upgrade "pip>=21.3.1,<22.1"
      - name: Get pip cache dir
        id: pip-cache
        # `::set-output` is deprecated; write step outputs to $GITHUB_OUTPUT.
        run: |
          echo "dir=$(pip cache dir)" >> "$GITHUB_OUTPUT"
      - name: pip cache
        uses: actions/cache@v2
        with:
          path: |
            ${{ steps.pip-cache.outputs.dir }}
            /opt/hostedtoolcache/Python
            /Users/runner/hostedtoolcache/Python
          key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }}
          restore-keys: |
            ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-
      - name: Install pip-tools
        run: pip install pip-tools
      - name: Install apache-arrow on ubuntu
        if: matrix.os == 'ubuntu-latest'
        run: |
          sudo apt update
          sudo apt install -y -V ca-certificates lsb-release wget
          wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
          sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
          sudo apt update
          sudo apt install -y -V libarrow-dev
      - name: Install apache-arrow on macos
        if: matrix.os == 'macOS-latest'
        run: brew install apache-arrow
      - name: Install dependencies
        run: make install-python-ci-dependencies
      - name: Setup Redis Cluster
        run: |
          docker pull vishnunair/docker-redis-cluster:latest
          docker run -d -p 6001:6379 -p 6002:6380 -p 6003:6381 -p 6004:6382 -p 6005:6383 -p 6006:6384 --name redis-cluster vishnunair/docker-redis-cluster
      - name: Test python
        if: ${{ always() }} # this will guarantee that step won't be canceled and resources won't leak
        # Run only BigQuery and File tests without dynamo and redshift tests.
        run: |
          pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "BigQuery and not dynamo and not Redshift and not Snowflake"
          pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "File and not dynamo and not Redshift and not Snowflake"
97+
Lines changed: 96 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,96 @@
1+
# Fork-friendly Snowflake integration tests: runs the Snowflake and File
# flavors of the feast Python integration test suite using the fork's
# Snowflake CI credentials.
name: fork-pr-integration-tests-snowflake

on: [pull_request]

jobs:
  integration-test-python:
    # Guard so this only runs on the fork that configured the Snowflake secrets.
    if: github.repository == 'your github repo' # swap here with your project id
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: [ "3.8" ]
        os: [ ubuntu-latest ]
    env:
      OS: ${{ matrix.os }}
      PYTHON: ${{ matrix.python-version }}
    services:
      redis:
        image: redis
        ports:
          # Quoted so the host:container mapping is always read as a string.
          - "6379:6379"
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - uses: actions/checkout@v2
        with:
          # pull_request_target runs the workflow in the context of the base repo
          # as such actions/checkout needs to be explicit configured to retrieve
          # code from the PR.
          ref: refs/pull/${{ github.event.pull_request.number }}/merge
          submodules: recursive
      - name: Setup Python
        uses: actions/setup-python@v2
        id: setup-python
        with:
          python-version: ${{ matrix.python-version }}
          architecture: x64
      - name: Setup Go
        id: setup-go
        uses: actions/setup-go@v2
        with:
          # Quoted so YAML never retypes a version that looks like a number.
          go-version: "1.18.0"
      - name: Upgrade pip version
        run: |
          pip install --upgrade "pip>=21.3.1,<22.1"
      - name: Get pip cache dir
        id: pip-cache
        # `::set-output` is deprecated; write step outputs to $GITHUB_OUTPUT.
        run: |
          echo "dir=$(pip cache dir)" >> "$GITHUB_OUTPUT"
      - name: pip cache
        uses: actions/cache@v2
        with:
          path: |
            ${{ steps.pip-cache.outputs.dir }}
            /opt/hostedtoolcache/Python
            /Users/runner/hostedtoolcache/Python
          key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }}
          restore-keys: |
            ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-
      - name: Install pip-tools
        run: pip install pip-tools
      - name: Install apache-arrow on ubuntu
        if: matrix.os == 'ubuntu-latest'
        run: |
          sudo apt update
          sudo apt install -y -V ca-certificates lsb-release wget
          wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
          sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
          sudo apt update
          sudo apt install -y -V libarrow-dev
      - name: Install apache-arrow on macos
        if: matrix.os == 'macOS-latest'
        run: brew install apache-arrow
      - name: Install dependencies
        run: make install-python-ci-dependencies
      - name: Setup Redis Cluster
        run: |
          docker pull vishnunair/docker-redis-cluster:latest
          docker run -d -p 6001:6379 -p 6002:6380 -p 6003:6381 -p 6004:6382 -p 6005:6383 -p 6006:6384 --name redis-cluster vishnunair/docker-redis-cluster
      - name: Test python
        if: ${{ always() }} # this will guarantee that step won't be canceled and resources won't leak
        env:
          SNOWFLAKE_CI_DEPLOYMENT: ${{ secrets.SNOWFLAKE_CI_DEPLOYMENT }}
          SNOWFLAKE_CI_USER: ${{ secrets.SNOWFLAKE_CI_USER }}
          SNOWFLAKE_CI_PASSWORD: ${{ secrets.SNOWFLAKE_CI_PASSWORD }}
          SNOWFLAKE_CI_ROLE: ${{ secrets.SNOWFLAKE_CI_ROLE }}
          SNOWFLAKE_CI_WAREHOUSE: ${{ secrets.SNOWFLAKE_CI_WAREHOUSE }}
        # Run only Snowflake and File tests, without dynamo, redshift,
        # BigQuery, and gcp tests.
        # Fix: pytest `-k` keyword matching is case-sensitive; the original
        # `not Bigquery` would not match test ids spelled `BigQuery` (the
        # casing used by the sibling AWS/GCP workflows), so BigQuery tests
        # were not actually excluded.
        run: |
          pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "Snowflake and not dynamo and not Redshift and not BigQuery and not gcp"
          pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests --integration --durations=5 --timeout=1200 --timeout_method=thread -k "File and not dynamo and not Redshift and not BigQuery and not gcp"
96+

.github/workflows/java_master_only.yml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@ on:
99

1010
jobs:
1111
build-docker-images:
12+
if: github.repository == 'feast-dev/feast'
1213
runs-on: ubuntu-latest
1314
strategy:
1415
matrix:
@@ -46,6 +47,7 @@ jobs:
4647
fi
4748
4849
lint-java:
50+
if: github.repository == 'feast-dev/feast'
4951
runs-on: ubuntu-latest
5052
steps:
5153
- uses: actions/checkout@v2
@@ -55,6 +57,7 @@ jobs:
5557
run: make lint-java
5658

5759
unit-test-java:
60+
if: github.repository == 'feast-dev/feast'
5861
runs-on: ubuntu-latest
5962
steps:
6063
- uses: actions/checkout@v2
@@ -80,6 +83,7 @@ jobs:
8083
path: ${{ github.workspace }}/docs/coverage/java/target/site/jacoco-aggregate/
8184

8285
integration-test:
86+
if: github.repository == 'feast-dev/feast'
8387
runs-on: ubuntu-latest
8488
steps:
8589
- uses: actions/checkout@v2

0 commit comments

Comments
 (0)