|
53 | 53 |
|
54 | 54 | import airflow |
55 | 55 | from airflow import settings |
56 | | -from airflow.jobs.base_job import BaseJob |
57 | 56 | from airflow.models import ( |
58 | 57 | DAG, |
59 | 58 | DagModel, |
|
101 | 100 | # List of all the objects that will be deleted. Comment out the DB objects you |
102 | 101 | # want to skip. |
103 | 102 | DATABASE_OBJECTS = [ |
104 | | - { |
105 | | - "airflow_db_model": BaseJob, |
106 | | - "age_check_column": BaseJob.latest_heartbeat, |
107 | | - "keep_last": False, |
108 | | - "keep_last_filters": None, |
109 | | - "keep_last_group_by": None, |
110 | | - }, |
111 | 103 | { |
112 | 104 | "airflow_db_model": DagRun, |
113 | 105 | "age_check_column": DagRun.execution_date, |
|
228 | 220 | except Exception as e: |
229 | 221 | logging.error(e) |
230 | 222 |
|
# Airflow 2.6.0 removed airflow.jobs.base_job.BaseJob in favour of
# airflow.jobs.job.Job, so the cleanup entry for the jobs table has to be
# chosen based on the running Airflow version.
#
# NOTE: AIRFLOW_VERSION (defined earlier in this file) is a list of version
# string components. Comparing the raw string list against ["2", "6", "0"]
# is lexicographic and wrongly treats "10" < "6", i.e. Airflow 2.10+ would
# be classified as older than 2.6 and the Job entry silently skipped.
# Compare numeric tuples instead.
def _version_as_ints(version_parts):
    """Best-effort numeric prefix of a split version string.

    ["2", "10", "1"] -> (2, 10, 1); a component with a non-numeric suffix
    such as "0rc1" contributes its leading digits and ends the tuple, and
    a fully non-numeric component ends the tuple immediately.
    """
    numbers = []
    for part in version_parts:
        digits = ""
        for char in part:
            if not char.isdigit():
                break
            digits += char
        if not digits:
            break
        numbers.append(int(digits))
        if digits != part:
            # "0rc1" and similar: stop after the numeric prefix.
            break
    return tuple(numbers)


if _version_as_ints(AIRFLOW_VERSION) < (2, 6, 0):
    # Pre-2.6: the jobs table is modelled by BaseJob.
    try:
        from airflow.jobs.base_job import BaseJob

        DATABASE_OBJECTS.append(
            {
                "airflow_db_model": BaseJob,
                "age_check_column": BaseJob.latest_heartbeat,
                "keep_last": False,
                "keep_last_filters": None,
                "keep_last_group_by": None,
            }
        )
    except Exception as e:
        logging.error(e)
else:
    # 2.6+: BaseJob was replaced by Job (airflow.jobs.job).
    try:
        from airflow.jobs.job import Job

        DATABASE_OBJECTS.append(
            {
                "airflow_db_model": Job,
                "age_check_column": Job.latest_heartbeat,
                "keep_last": False,
                "keep_last_filters": None,
                "keep_last_group_by": None,
            }
        )
    except Exception as e:
        logging.error(e)
| 251 | + |
231 | 252 | default_args = { |
232 | 253 | "owner": DAG_OWNER_NAME, |
233 | 254 | "depends_on_past": False, |
|
0 commit comments