Skip to content

Commit

Permalink
Merge branch 'v2.5.1' into v2.6.3
Browse files Browse the repository at this point in the history
  • Loading branch information
barbarahui committed Nov 1, 2023
2 parents b240667 + 4bf5b69 commit 35024b9
Show file tree
Hide file tree
Showing 7 changed files with 63 additions and 6 deletions.
42 changes: 42 additions & 0 deletions .github/workflows/pull_upstream.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
---
name: Sync Upstream

# Pulls the configured upstream branch of aws/aws-mwaa-local-runner and
# merges it into this repository, using the sync-and-merge action:
# https://github.com/marketplace/actions/sync-and-merge-upstream-repository-with-your-current-repository
env:
  # Upstream repository to fetch from.
  UPSTREAM_URL: "https://github.com/aws/aws-mwaa-local-runner.git"
  # Token used to authenticate the push back to this repository
  # (supplied via repository secrets).
  WORKFLOW_TOKEN: ${{ secrets.WORKFLOW_TOKEN }}
  UPSTREAM_BRANCH: "v2.5.1"
  # Optional, defaults to UPSTREAM_BRANCH
  DOWNSTREAM_BRANCH: ""
  # Optional fetch arguments
  FETCH_ARGS: ""
  # Optional merge arguments
  MERGE_ARGS: ""
  # Optional push arguments
  PUSH_ARGS: ""
  # Optional toggle to spawn time logs (keeps action active)
  SPAWN_LOGS: "false"  # "true" or "false"

# This runs every day at 12:01 UTC
on:
  schedule:
    - cron: '1 12 * * *'
  # Allows manual workflow run (must be in default branch to work)
  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: GitHub Sync to Upstream Repository
        uses: dabreadman/[email protected]
        with:
          upstream_repo: ${{ env.UPSTREAM_URL }}
          upstream_branch: ${{ env.UPSTREAM_BRANCH }}
          downstream_branch: ${{ env.DOWNSTREAM_BRANCH }}
          token: ${{ env.WORKFLOW_TOKEN }}
          fetch_args: ${{ env.FETCH_ARGS }}
          merge_args: ${{ env.MERGE_ARGS }}
          push_args: ${{ env.PUSH_ARGS }}
          spawn_logs: ${{ env.SPAWN_LOGS }}


1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -5,3 +5,4 @@ dags/**/*.pyc
/logs
/db-data
.DS_Store
/docker/.env
7 changes: 7 additions & 0 deletions docker/.env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# Environment variables set in this file will be used in the docker-compose files stored in the same directory
DAGS_HOME=""
PLUGINS_HOME=""
REQS_HOME=""
STARTUP_HOME=""
RIKOLTI_DATA_HOME=""
DOCKER_SOCKET=""
1 change: 1 addition & 0 deletions docker/config/.env.localrunner
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,4 @@ DEFAULT_PASSWORD="test"
S3_DAGS_PATH=""
S3_PLUGINS_PATH=""
S3_REQUIREMENTS_PATH=""

2 changes: 1 addition & 1 deletion docker/config/airflow.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,7 @@ killed_task_cleanup_time = 60
# Whether to override params with dag_run.conf. If you pass some key-value pairs
# through ``airflow dags backfill -c`` or
# ``airflow dags trigger -c``, the key-value pairs will override the existing ones in params.
dag_run_conf_overrides_params = False
dag_run_conf_overrides_params = True

# When discovering DAGs, ignore any files that don't contain the strings ``DAG`` and ``airflow``.
dag_discovery_safe_mode = True
Expand Down
10 changes: 6 additions & 4 deletions docker/docker-compose-local.yml
Original file line number Diff line number Diff line change
Expand Up @@ -26,10 +26,12 @@ services:
max-size: 10m
max-file: "3"
volumes:
- "${PWD}/dags:/usr/local/airflow/dags"
- "${PWD}/plugins:/usr/local/airflow/plugins"
- "${PWD}/requirements:/usr/local/airflow/requirements"
- "${PWD}/startup_script:/usr/local/airflow/startup"
- "${DAGS_HOME}:/usr/local/airflow/dags/rikolti"
- "${PLUGINS_HOME}:/usr/local/airflow/plugins"
- "${REQS_HOME}:/usr/local/airflow/requirements"
- "${STARTUP_HOME}:/usr/local/airflow/startup"
- "${RIKOLTI_DATA_HOME}:/usr/local/airflow/rikolti_data"
- "${DOCKER_SOCKET}:/var/run/docker.sock"
ports:
- "8080:8080"
command: local-runner
Expand Down
6 changes: 5 additions & 1 deletion docker/script/run-startup.sh
Original file line number Diff line number Diff line change
@@ -1,4 +1,8 @@
#!/usr/bin/env bash

source "$AIRFLOW_HOME/startup/startup.sh"
declare -p | grep -v '^declare \-[aAilnrtux]*r ' > stored_env
declare -p | grep -v '^declare \-[aAilnrtux]*r ' > stored_env

# allows the airflow user on the airflow docker container to access
# the host's docker.sock. This is needed for the DockerOperator to work.
sudo chmod 666 /var/run/docker.sock

0 comments on commit 35024b9

Please sign in to comment.