Coverage for afcli/__init__.py: 20%
339 statements
« prev ^ index » next — coverage.py v7.9.1, created at 2025-06-22 19:11 -0500
1#!/usr/bin/env python3
2"""
3Airflow CLI wrapper - A command-line utility for interacting with Airflow REST API
4"""
6import argparse
7import json
8import os
9import sys
10from datetime import datetime, timezone
11from typing import Optional, Dict, Any, List
12from tabulate import tabulate
13from colorama import init, Fore, Style
14import airflow_client.client
15from airflow_client.client.api import dag_api, dag_run_api, task_instance_api
16from airflow_client.client.exceptions import OpenApiException
17import requests
19# Initialize colorama for cross-platform color support
20init(autoreset=True)
class AirflowClient:
    """Client for interacting with Airflow REST API using official apache-airflow-client.

    Obtains a JWT token from Airflow's auth endpoint when credentials are
    supplied, then wraps the DAG, DAG-run and task-instance APIs. All API
    errors are reported to the user and terminate the process (exit code 1),
    so callers never see exceptions from these methods.
    """

    def __init__(self, host: str = "localhost:8080", username: Optional[str] = None, password: Optional[str] = None):
        """Configure the underlying API client, authenticating first if
        a username and password were provided.
        """
        self.host = host
        self.base_url = f"http://{host}"  # Don't include /api/v2 here - the client adds it
        self.auth_url = f"http://{host}/auth/token"

        # Get JWT token if credentials provided
        access_token = None
        if username and password:
            access_token = self._get_jwt_token(username, password)

        # Configure the API client with JWT token
        configuration = airflow_client.client.Configuration(
            host=self.base_url,
            access_token=access_token,
        )

        # Create API client
        self.api_client = airflow_client.client.ApiClient(configuration)

        # Initialize API instances
        self.dag_api = dag_api.DAGApi(self.api_client)
        self.dag_run_api = dag_run_api.DagRunApi(self.api_client)
        self.task_instance_api = task_instance_api.TaskInstanceApi(self.api_client)

    def _get_jwt_token(self, username: str, password: str) -> str:
        """Get JWT token from Airflow auth endpoint.

        Exits the process with status 1 on any failure (HTTP error,
        malformed response, network problem).
        """
        try:
            response = requests.post(
                self.auth_url,
                json={"username": username, "password": password},
                headers={"Content-Type": "application/json"},
                # A finite timeout keeps the CLI from hanging forever when
                # the host is unreachable.
                timeout=30,
            )
            response.raise_for_status()

            token_data = response.json()
            if 'access_token' in token_data:
                return token_data['access_token']
            else:
                print(f"{Fore.RED}No access token in response{Style.RESET_ALL}")
                sys.exit(1)

        except requests.exceptions.HTTPError as e:
            print(f"{Fore.RED}Authentication failed: {e}{Style.RESET_ALL}")
            # BUG FIX: `if e.response:` relied on Response.__bool__, which is
            # False for any error status code, so the response body was never
            # shown. Compare against None explicitly.
            if e.response is not None:
                print(f"{Fore.RED}Response: {e.response.text}{Style.RESET_ALL}")
            sys.exit(1)
        except Exception as e:
            # Boundary handler: any other failure (connection refused, bad
            # JSON, ...) is reported and aborts the CLI.
            print(f"{Fore.RED}Failed to authenticate: {e}{Style.RESET_ALL}")
            sys.exit(1)

    def _handle_api_error(self, e: OpenApiException, operation: str):
        """Print a user-friendly message for an API error and exit."""
        if e.status == 401:
            print(f"{Fore.RED}Authentication failed. Please check your credentials.{Style.RESET_ALL}")
        elif e.status == 404:
            print(f"{Fore.RED}Resource not found for operation: {operation}{Style.RESET_ALL}")
        elif e.status == 403:
            print(f"{Fore.RED}Access forbidden for operation: {operation}{Style.RESET_ALL}")
        else:
            print(f"{Fore.RED}API Error ({e.status}): {e.reason}{Style.RESET_ALL}")
        sys.exit(1)

    def list_dags(self, limit: int = 100, only_active: bool = True) -> List[Dict[str, Any]]:
        """List all DAGs.

        only_active=True requests only unpaused DAGs; False requests all.
        """
        try:
            # Convert only_active to paused parameter (inverted logic):
            # only_active -> paused=False; otherwise no filter (None).
            paused = None if not only_active else False
            response = self.dag_api.get_dags(limit=limit, paused=paused)
            return [dag.to_dict() for dag in response.dags] if response.dags else []
        except OpenApiException as e:
            self._handle_api_error(e, "list DAGs")

    def get_dag(self, dag_id: str) -> Dict[str, Any]:
        """Get DAG details as a plain dict."""
        try:
            response = self.dag_api.get_dag(dag_id)
            return response.to_dict()
        except OpenApiException as e:
            self._handle_api_error(e, f"get DAG {dag_id}")

    def get_dag_runs(self, dag_id: str, limit: int = 1) -> List[Dict[str, Any]]:
        """Get up to `limit` DAG runs for a DAG."""
        try:
            response = self.dag_run_api.get_dag_runs(dag_id, limit=limit)
            return [run.to_dict() for run in response.dag_runs] if response.dag_runs else []
        except OpenApiException as e:
            self._handle_api_error(e, f"get DAG runs for {dag_id}")

    def toggle_dag_pause(self, dag_id: str, is_paused: bool) -> Dict[str, Any]:
        """Set the DAG's paused state (True = paused)."""
        try:
            dag_update = airflow_client.client.DAGPatchBody(is_paused=is_paused)
            response = self.dag_api.patch_dag(dag_id, dag_update)
            return response.to_dict()
        except OpenApiException as e:
            self._handle_api_error(e, f"toggle pause for DAG {dag_id}")

    def trigger_dag(self, dag_id: str, config: Optional[Dict[str, Any]] = None,
                    logical_date: Optional[str] = None, dag_run_id: Optional[str] = None) -> Dict[str, Any]:
        """Trigger a DAG run.

        If logical_date is omitted, the current UTC time is used (Zulu
        suffix form, which the API expects).
        """
        try:
            if logical_date is None:
                logical_date = datetime.now(timezone.utc).isoformat().replace('+00:00', 'Z')

            dag_run = airflow_client.client.TriggerDAGRunPostBody(
                logical_date=logical_date,
                conf=config or {},
                dag_run_id=dag_run_id
            )

            response = self.dag_run_api.post_dag_run(dag_id, dag_run)
            return response.to_dict()
        except OpenApiException as e:
            self._handle_api_error(e, f"trigger DAG {dag_id}")

    def get_task_instances(self, dag_id: str, dag_run_id: str) -> List[Dict[str, Any]]:
        """Get task instances for a DAG run."""
        try:
            response = self.task_instance_api.get_task_instances(dag_id, dag_run_id)
            return [task.to_dict() for task in response.task_instances] if response.task_instances else []
        except OpenApiException as e:
            self._handle_api_error(e, f"get task instances for {dag_id}/{dag_run_id}")

    def get_task_log(self, dag_id: str, dag_run_id: str, task_id: str, task_try_number: int = 1) -> str:
        """Get the log text for one try of a task instance."""
        try:
            response = self.task_instance_api.get_log(dag_id, dag_run_id, task_id, task_try_number)
            # Depending on client version the log may come back as an object
            # with `.content` or as raw text — handle both.
            return response.content if hasattr(response, 'content') else str(response)
        except OpenApiException as e:
            self._handle_api_error(e, f"get logs for {dag_id}/{dag_run_id}/{task_id}")

    def clear_task_instance(self, dag_id: str, dag_run_id: str, task_id: str) -> Dict[str, Any]:
        """Clear a single task instance so the scheduler re-runs it."""
        try:
            clear_request = airflow_client.client.ClearTaskInstancesBody(
                dry_run=False,
                task_ids=[task_id],
                only_failed=True,
                only_running=False,
                include_subdags=True,
                include_parentdag=True,
                reset_dag_runs=False
            )
            response = self.dag_api.post_clear_task_instances(dag_id, clear_request)
            return response.to_dict() if hasattr(response, 'to_dict') else {}
        except OpenApiException as e:
            self._handle_api_error(e, f"clear task {task_id} for {dag_id}")
def format_datetime(dt_str: Optional[str]) -> str:
    """Format an ISO-8601 datetime string as 'YYYY-MM-DD HH:MM:SS' for display.

    Returns "N/A" for None/empty input, and the input unchanged when it
    cannot be parsed.
    """
    if not dt_str:
        return "N/A"
    try:
        # fromisoformat() on older Pythons rejects a trailing 'Z'; map it
        # to an explicit UTC offset first.
        dt = datetime.fromisoformat(dt_str.replace('Z', '+00:00'))
        return dt.strftime("%Y-%m-%d %H:%M:%S")
    except (ValueError, TypeError):
        # BUG FIX: was a bare `except:` that also swallowed KeyboardInterrupt
        # and SystemExit. Only parse/type failures should fall through.
        return dt_str
def get_status_color(state: str) -> str:
    """Get color for task/dag state"""
    # States grouped by the color they share; flattened into a lookup table.
    color_groups = (
        (Fore.GREEN, ("success",)),
        (Fore.RED, ("failed",)),
        (Fore.YELLOW, ("running", "up_for_retry", "up_for_reschedule", "restarting")),
        (Fore.CYAN, ("queued", "deferred")),
        (Fore.BLUE, ("scheduled",)),
        (Fore.MAGENTA, ("skipped",)),
        (Fore.LIGHTBLACK_EX, ("removed",)),
    )
    lookup = {name: color for color, names in color_groups for name in names}
    # Unknown states render in white.
    return lookup.get(state.lower(), Fore.WHITE)
def cmd_list(client: AirflowClient, args):
    """List all DAGs"""
    dags = client.list_dags(limit=args.limit, only_active=not args.all)

    if not dags:
        print(f"{Fore.YELLOW}No DAGs found{Style.RESET_ALL}")
        return

    print(f"\n{Fore.CYAN}Available DAGs:{Style.RESET_ALL}")

    table_headers = ["DAG ID", "Is Paused", "Schedule", "Tags", "Next Run", "Import Errors"]
    table_rows = []

    for entry in sorted(dags, key=lambda d: d['dag_id']):
        paused = entry.get('is_paused', True)
        broken = entry.get('has_import_errors', False)

        # Color code based on status: red = import errors, yellow = paused,
        # green = active.
        if broken:
            id_color = Fore.RED
        elif paused:
            id_color = Fore.YELLOW
        else:
            id_color = Fore.GREEN

        # Get schedule info
        schedule_text = entry.get('timetable_summary') or entry.get('timetable_description', 'None')

        # Extract tag names from tag objects (tags may be dicts or strings)
        names = [t['name'] if isinstance(t, dict) else str(t) for t in entry.get('tags', [])]

        pause_cell = f"{Fore.RED if paused else Fore.GREEN}{'Paused' if paused else 'Active'}{Style.RESET_ALL}"
        error_cell = f"{Fore.RED}Yes{Style.RESET_ALL}" if broken else "No"

        table_rows.append([
            f"{id_color}{entry['dag_id']}{Style.RESET_ALL}",
            pause_cell,
            schedule_text,
            ', '.join(names) or 'None',
            format_datetime(entry.get('next_dagrun_run_after')),
            error_cell,
        ])

    print(tabulate(table_rows, headers=table_headers, tablefmt="grid"))

    # Summary counts
    total_count = len(dags)
    paused_count = sum(1 for d in dags if d.get('is_paused', True))
    error_count = sum(1 for d in dags if d.get('has_import_errors', False))

    print(f"\n{Fore.CYAN}Summary:{Style.RESET_ALL}")
    print(f" Total DAGs: {total_count}")
    print(f" Active: {total_count - paused_count}")
    print(f" Paused: {paused_count}")
    if error_count > 0:
        print(f" {Fore.RED}Import Errors: {error_count}{Style.RESET_ALL}")
def cmd_status(client: AirflowClient, args):
    """View DAG status"""
    # Two API calls: DAG metadata plus its five most recent runs.
    dag = client.get_dag(args.dag_id)
    dag_runs = client.get_dag_runs(args.dag_id, limit=5)

    print(f"\n{Fore.CYAN}DAG: {dag['dag_id']}{Style.RESET_ALL}")
    # Only show the display name when it differs from the DAG id.
    if dag.get('dag_display_name') and dag['dag_display_name'] != dag['dag_id']:
        print(f"Display Name: {dag['dag_display_name']}")
    print(f"Description: {dag.get('description', 'N/A')}")
    # Red "Yes" when paused, green "No" when active; unknown defaults to paused.
    print(f"Is Paused: {Fore.RED if dag.get('is_paused', True) else Fore.GREEN}{'Yes' if dag.get('is_paused', True) else 'No'}{Style.RESET_ALL}")
    print(f"Schedule: {dag.get('timetable_summary') or dag.get('timetable_description', 'N/A')}")
    # Extract tag names from tag objects (tags may be dicts or plain strings)
    tags = dag.get('tags', [])
    tag_names = [tag['name'] if isinstance(tag, dict) else str(tag) for tag in tags]
    print(f"Tags: {', '.join(tag_names) or 'None'}")
    print(f"Max Active Tasks: {dag.get('max_active_tasks', 'N/A')}")
    print(f"Max Active Runs: {dag.get('max_active_runs', 'N/A')}")
    if dag.get('has_import_errors', False):
        print(f"{Fore.RED}Import Errors: Yes{Style.RESET_ALL}")
    print(f"Next Run: {format_datetime(dag.get('next_dagrun_run_after'))}")

    # Recent-runs table, color-coded by run state.
    if dag_runs:
        print(f"\n{Fore.CYAN}Recent DAG Runs:{Style.RESET_ALL}")
        headers = ["Run ID", "State", "Start Date", "End Date"]
        rows = []
        for run in dag_runs:
            state = run.get('state', 'unknown')
            color = get_status_color(state)
            rows.append([
                run['dag_run_id'],
                f"{color}{state}{Style.RESET_ALL}",
                format_datetime(run.get('start_date')),
                format_datetime(run.get('end_date'))
            ])
        print(tabulate(rows, headers=headers, tablefmt="grid"))
    else:
        print(f"\n{Fore.YELLOW}No recent runs found{Style.RESET_ALL}")
def cmd_pause(client: AirflowClient, args):
    """Pause a DAG"""
    # The API response is not needed; errors already abort inside the client.
    client.toggle_dag_pause(args.dag_id, True)
    print(f"{Fore.GREEN}DAG '{args.dag_id}' has been paused{Style.RESET_ALL}")
def cmd_unpause(client: AirflowClient, args):
    """Unpause a DAG"""
    # The API response is not needed; errors already abort inside the client.
    client.toggle_dag_pause(args.dag_id, False)
    print(f"{Fore.GREEN}DAG '{args.dag_id}' has been unpaused{Style.RESET_ALL}")
def cmd_trigger(client: AirflowClient, args):
    """Trigger a DAG run"""
    # Parse the optional --config JSON payload before touching the API.
    run_conf = None
    if args.config:
        try:
            run_conf = json.loads(args.config)
        except json.JSONDecodeError:
            print(f"{Fore.RED}Invalid JSON config: {args.config}{Style.RESET_ALL}")
            sys.exit(1)

    result = client.trigger_dag(args.dag_id, run_conf)
    print(f"{Fore.GREEN}DAG '{args.dag_id}' triggered successfully{Style.RESET_ALL}")
    print(f"Run ID: {result['dag_run_id']}")
    print(f"State: {result['state']}")
def _task_duration(task: Dict[str, Any]) -> str:
    """Return the H:MM:SS duration of a task instance, or "N/A" when the
    start/end dates are missing or cannot be parsed."""
    start_str = task.get('start_date')
    end_str = task.get('end_date')
    if not (start_str and end_str):
        return "N/A"
    try:
        start = datetime.fromisoformat(start_str.replace('Z', '+00:00'))
        end = datetime.fromisoformat(end_str.replace('Z', '+00:00'))
    except (ValueError, TypeError, AttributeError):
        # BUG FIX: was a bare `except: pass` that also swallowed
        # KeyboardInterrupt/SystemExit; only parse failures belong here.
        return "N/A"
    # str(timedelta) may carry microseconds; drop everything after the dot.
    return str(end - start).split('.')[0]


def cmd_tasks(client: AirflowClient, args):
    """View tasks in a DAG and their statuses"""
    # Get the latest DAG run if run_id not specified
    if not args.run_id:
        dag_runs = client.get_dag_runs(args.dag_id, limit=1)
        if not dag_runs:
            print(f"{Fore.YELLOW}No DAG runs found for '{args.dag_id}'{Style.RESET_ALL}")
            return
        dag_run_id = dag_runs[0]['dag_run_id']
        print(f"Using latest run: {dag_run_id}")
    else:
        dag_run_id = args.run_id

    tasks = client.get_task_instances(args.dag_id, dag_run_id)

    if not tasks:
        print(f"{Fore.YELLOW}No tasks found{Style.RESET_ALL}")
        return

    print(f"\n{Fore.CYAN}Tasks for DAG '{args.dag_id}' (Run: {dag_run_id}):{Style.RESET_ALL}")

    headers = ["Task ID", "State", "Start Date", "End Date", "Duration", "Try Number"]
    rows = []

    # Sort by start date; tasks that have not started yet sort first.
    for task in sorted(tasks, key=lambda x: x.get('start_date') or ''):
        state = task.get('state', 'unknown')
        color = get_status_color(state)
        rows.append([
            task['task_id'],
            f"{color}{state}{Style.RESET_ALL}",
            format_datetime(task.get('start_date')),
            format_datetime(task.get('end_date')),
            _task_duration(task),
            task.get('try_number', 1)
        ])

    print(tabulate(rows, headers=headers, tablefmt="grid"))

    # Summary: task count per state, color-coded.
    state_counts = {}
    for task in tasks:
        state = task.get('state', 'unknown')
        state_counts[state] = state_counts.get(state, 0) + 1

    print(f"\n{Fore.CYAN}Summary:{Style.RESET_ALL}")
    for state, count in state_counts.items():
        color = get_status_color(state)
        print(f" {color}{state}: {count}{Style.RESET_ALL}")
def cmd_logs(client: AirflowClient, args):
    """View task logs"""
    # Get the latest DAG run if run_id not specified
    if not args.run_id:
        dag_runs = client.get_dag_runs(args.dag_id, limit=1)
        if not dag_runs:
            print(f"{Fore.YELLOW}No DAG runs found for '{args.dag_id}'{Style.RESET_ALL}")
            return
        dag_run_id = dag_runs[0]['dag_run_id']
        print(f"Using latest run: {dag_run_id}")
    else:
        dag_run_id = args.run_id

    try:
        logs = client.get_task_log(args.dag_id, dag_run_id, args.task_id, args.try_number)
        print(f"\n{Fore.CYAN}Logs for task '{args.task_id}' (Try: {args.try_number}):{Style.RESET_ALL}")
        print("-" * 80)

        # Try to parse as JSON and format nicely
        try:
            log_data = json.loads(logs)
            if isinstance(log_data, dict) and 'content' in log_data:
                for entry in log_data['content']:
                    if 'timestamp' in entry and 'event' in entry:
                        timestamp = entry['timestamp'][:19].replace('T', ' ')
                        level = entry.get('level', 'info').upper()
                        logger = entry.get('logger', '')
                        event = entry['event']

                        # Color code by level
                        if level == 'ERROR':
                            level_color = Fore.RED
                        elif level == 'WARNING':
                            level_color = Fore.YELLOW
                        elif level == 'INFO':
                            level_color = Fore.CYAN
                        else:
                            level_color = Fore.WHITE

                        print(f"{timestamp} {level_color}[{level}]{Style.RESET_ALL} {logger}: {event}")
                    elif 'event' in entry:
                        # Simple event without timestamp
                        print(entry['event'])
                    else:
                        # BUG FIX: this branch previously printed the entire
                        # raw `logs` blob once per unrecognized entry,
                        # duplicating output. Show just the odd entry.
                        print(entry)
            else:
                # BUG FIX: valid JSON that is not the expected
                # {'content': [...]} shape used to be silently dropped;
                # print it as-is instead.
                print(logs)
        except json.JSONDecodeError:
            # Not JSON, print as plain text
            print(logs)

        print("-" * 80)
    except Exception as e:
        # Top-level boundary: report any retrieval failure without a traceback.
        print(f"{Fore.RED}Failed to retrieve logs: {e}{Style.RESET_ALL}")
def cmd_clear(client: AirflowClient, args):
    """Clear failed tasks in a DAG"""
    # Resolve which run to act on, defaulting to the most recent one.
    if args.run_id:
        dag_run_id = args.run_id
    else:
        dag_runs = client.get_dag_runs(args.dag_id, limit=1)
        if not dag_runs:
            print(f"{Fore.YELLOW}No DAG runs found for '{args.dag_id}'{Style.RESET_ALL}")
            return
        dag_run_id = dag_runs[0]['dag_run_id']
        print(f"Using latest run: {dag_run_id}")

    # Single-task mode: clear exactly the requested task and stop.
    if args.task_id:
        client.clear_task_instance(args.dag_id, dag_run_id, args.task_id)
        print(f"{Fore.GREEN}Cleared task '{args.task_id}'{Style.RESET_ALL}")
        return

    # Otherwise collect every failed task in the run.
    all_tasks = client.get_task_instances(args.dag_id, dag_run_id)
    failed = [t for t in all_tasks if t.get('state') == 'failed']

    if not failed:
        print(f"{Fore.YELLOW}No failed tasks found{Style.RESET_ALL}")
        return

    print(f"Found {len(failed)} failed tasks:")
    for task in failed:
        print(f" - {task['task_id']}")

    # Ask for confirmation unless -y/--yes was passed.
    if not args.yes:
        answer = input(f"\n{Fore.YELLOW}Clear all failed tasks? [y/N]: {Style.RESET_ALL}")
        if answer.lower() != 'y':
            print("Cancelled")
            return

    for task in failed:
        client.clear_task_instance(args.dag_id, dag_run_id, task['task_id'])
        print(f"{Fore.GREEN}Cleared task '{task['task_id']}'{Style.RESET_ALL}")
def main():
    """CLI entry point: build the argument parser, dispatch to the
    cmd_* handler matching the chosen subcommand."""
    # Shown verbatim in --help via RawDescriptionHelpFormatter below.
    examples = """
Examples:
 # Set credentials via environment variables
 export AIRFLOW_USER=admin AIRFLOW_PASSWORD=secret

 # List all DAGs
 afcli list

 # Get DAG status and recent runs
 afcli status my_dag

 # View tasks and their status in a DAG run
 afcli tasks my_dag

 # Trigger a DAG with configuration
 afcli trigger my_dag --config '{"date": "2024-01-01", "env": "prod"}'

 # View logs for a specific task
 afcli logs my_dag task_name

 # Pause/unpause DAGs
 afcli pause my_dag
 afcli unpause my_dag

 # Clear failed tasks
 afcli clear my_dag

 # Use with different Airflow instance
 afcli --host airflow.company.com:8080 --user admin --password secret list

Useful LLM Context Commands:
 afcli list --limit 20 # See available DAGs
 afcli status <dag_id> # Get DAG details and recent runs
 afcli tasks <dag_id> # See task execution status
 afcli logs <dag_id> <task_id> # Debug task failures
"""

    parser = argparse.ArgumentParser(
        description="Airflow CLI wrapper - A command-line utility for interacting with Airflow REST API",
        epilog=examples,
        # RawDescriptionHelpFormatter preserves the epilog's line breaks.
        formatter_class=argparse.RawDescriptionHelpFormatter
    )

    # Global arguments; each falls back to an environment variable.
    parser.add_argument("--host",
                        default=os.environ.get("AIRFLOW_HOST", "localhost:8080"),
                        help="Airflow host (default: localhost:8080, env: AIRFLOW_HOST)")
    parser.add_argument("--user",
                        default=os.environ.get("AIRFLOW_USER"),
                        help="Username for API authentication (env: AIRFLOW_USER)")
    parser.add_argument("--password",
                        default=os.environ.get("AIRFLOW_PASSWORD"),
                        help="Password for API authentication (env: AIRFLOW_PASSWORD)")

    # Subcommands
    subparsers = parser.add_subparsers(dest="command", help="Available commands")

    # List command
    list_parser = subparsers.add_parser("list", help="List all DAGs")
    list_parser.add_argument("--limit", type=int, default=100, help="Maximum number of DAGs to display (default: 100)")
    list_parser.add_argument("--all", action="store_true", help="Show all DAGs including inactive ones")

    # Status command
    status_parser = subparsers.add_parser("status", help="View DAG status")
    status_parser.add_argument("dag_id", help="DAG ID")

    # Pause command
    pause_parser = subparsers.add_parser("pause", help="Pause a DAG")
    pause_parser.add_argument("dag_id", help="DAG ID")

    # Unpause command
    unpause_parser = subparsers.add_parser("unpause", help="Unpause a DAG")
    unpause_parser.add_argument("dag_id", help="DAG ID")

    # Trigger command
    trigger_parser = subparsers.add_parser("trigger", help="Trigger a DAG run")
    trigger_parser.add_argument("dag_id", help="DAG ID")
    trigger_parser.add_argument("--config", help="JSON configuration for the DAG run")

    # Tasks command
    tasks_parser = subparsers.add_parser("tasks", help="View tasks in a DAG and their statuses")
    tasks_parser.add_argument("dag_id", help="DAG ID")
    tasks_parser.add_argument("--run-id", help="DAG run ID (default: latest)")

    # Logs command
    logs_parser = subparsers.add_parser("logs", help="View task logs")
    logs_parser.add_argument("dag_id", help="DAG ID")
    logs_parser.add_argument("task_id", help="Task ID")
    logs_parser.add_argument("--run-id", help="DAG run ID (default: latest)")
    logs_parser.add_argument("--try-number", type=int, default=1, help="Task try number (default: 1)")

    # Clear command
    clear_parser = subparsers.add_parser("clear", help="Clear failed tasks")
    clear_parser.add_argument("dag_id", help="DAG ID")
    clear_parser.add_argument("--task-id", help="Specific task ID to clear (default: all failed tasks)")
    clear_parser.add_argument("--run-id", help="DAG run ID (default: latest)")
    clear_parser.add_argument("-y", "--yes", action="store_true", help="Skip confirmation prompt")

    args = parser.parse_args()

    # No subcommand given: show help and exit non-zero.
    if not args.command:
        parser.print_help()
        sys.exit(1)

    # Create client (authenticates immediately if credentials are set).
    client = AirflowClient(args.host, args.user, args.password)

    # Execute command via dispatch table (command name -> handler).
    commands = {
        "list": cmd_list,
        "status": cmd_status,
        "pause": cmd_pause,
        "unpause": cmd_unpause,
        "trigger": cmd_trigger,
        "tasks": cmd_tasks,
        "logs": cmd_logs,
        "clear": cmd_clear
    }

    command_func = commands.get(args.command)
    if command_func:
        command_func(client, args)
    else:
        # Defensive fallback; argparse already restricts to known commands.
        parser.print_help()
        sys.exit(1)
# Script entry point: run the CLI only when executed directly, not on import.
if __name__ == "__main__":
    main()