
Test Sqs

This document contains Python code for an Airflow DAG that sends messages to an Amazon SQS queue. It includes: 1) helper functions that create an SQS client and send messages to SQS queues, plus SQSOperator classes defined as custom Airflow operators; 2) a DAG scheduled to run every minute that contains an SQSOperator task and a PythonOperator task; 3) an SQSOperator task configured to send a message to an SQS queue when executed, reading the logs of failed tasks to include in the message it sends.
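For orientation, the core operation that every helper below wraps is a single boto3 send_message call. A minimal sketch, assuming placeholder values for the queue URL and region and the default boto3 credential chain:

import boto3
import json

# Placeholder queue URL for illustration only; the DAG below hard-codes its own.
QUEUE_URL = "https://sqs.us-east-1.amazonaws.com/123456789012/example-queue"

# Credentials are resolved by boto3's default credential chain in this sketch.
sqs_client = boto3.Session(region_name="us-east-1").client("sqs")
response = sqs_client.send_message(
    QueueUrl=QUEUE_URL,
    MessageBody=json.dumps({"type": "example", "payload": {}, "meta": {}}),
)
print(response["MessageId"])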


# import datetime as dt

# from airflow import DAG


# import boto3
# from airflow.utils.trigger_rule import TriggerRule
# from airflow.configuration import conf
# from airflow.utils.helpers import parse_template_string

# from airflow.utils.decorators import apply_defaults


# from airflow.models import BaseOperator, Variable, TaskInstance
# from airflow.operators.python_operator import PythonOperator
# import json
# import logging
# import datetime, time
# from airflow.utils.db import provide_session
# from mock import patch, Mock

# AWS_DEFAULT_REGION = "us-east-1"

# dag = DAG(
# 'airflow_sqs_dag',
# description='DAG testing sqs airflow',
# schedule_interval='* * * * *',
# start_date=dt.datetime(2020, 3, 17),
# catchup=False,
# )
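# # NOTE: schedule_interval='* * * * *' runs this DAG every minute, and the SQS task defined further down only fires when an upstream task fails (trigger_rule=TriggerRule.ONE_FAILED).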

# def get_sqs_client(**params):
# """
# Get SQS client object.
# params:
# param 'region_name' name of the AWS region
# type 'string'
# param 'aws_access_key_id' AWS access key
# type 'string'
# param 'aws_secret_access_key' AWS secret key
# type 'string'
# """

# session = boto3.Session(
# aws_access_key_id = "AKIAQNCUA6ZLX57ZKLNB",
# aws_secret_access_key ="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",
# region_name = "us-east-1"
# )
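# # NOTE: the AWS credentials are hard-coded here for this test; in a real deployment they would normally come from an Airflow connection or environment variables rather than source code.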
# sqs_client = session.client("sqs")

# return sqs_client

# def send_message_1(queue_url: str, type: str, payload={}, meta={}, **params):
# """
# Send message to specific SQS queue.
# queue_url:url of queue
# type 'string'
# type:message type
# type 'string'
# payload:{} | optional
# params:
# param 'region_name' name of the AWS region
# type 'string'
# param 'aws_access_key_id' AWS access key
# type 'string'
# param 'aws_secret_access_key' AWS secret key
# type 'string'
# """

# if not type or not queue_url:
# raise ValueError("The `type` and `queue_url` fields cannot be empty.")

# parsed_message = json.dumps({"type": type, "payload": payload, "meta": meta})


# sqs_client = get_sqs_client(**params)
# if ".fifo" in queue_url:
# message_group_id = type + datetime.datetime.now().strftime("%d%m%Y%H%M%S")
# response = sqs_client.send_message(
# QueueUrl=queue_url,
# MessageBody=parsed_message,
# MessageGroupId=message_group_id,
# )
# else:
# response = sqs_client.send_message(
# QueueUrl=queue_url, MessageBody=parsed_message
# )

# return response["MessageId"]
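# # NOTE: FIFO queues (URLs ending in ".fifo") require a MessageGroupId on every message, which is why a timestamp-based group id is generated in that branch.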

# class SQSOperatortest(BaseOperator):
# """
# Operator to send a message (payload) to Amazon SQS Queue.
# :param endpoint: QueueUrl for the message
# :type endpoint: string
# :param message_type: Type of message
# :type message_type: string
# :param payload: Message contents
# :type payload: dict
# :param meta: Meta data
# :type meta: dict
# :param region: Queue's region name
# :type region: string
# """

# @apply_defaults
# def __init__(
# self, endpoint:str, message_type:str,
# payload={}, meta={}, region="us-east-1", *args, **kwargs):
# """ Calls BaseOperators' constructor and set passed variables(or
defaults) for operator """

# super(SQSOperatortest, self).__init__(*args, **kwargs)


# self.endpoint = endpoint
# self.message_type = message_type
# self.payload = payload
# self.meta = meta
# self.region = region
# def get_log_trail(self, fname, ti):
# ''' Return small log part containing the ERROR message. '''
# try:
# with open(fname, 'r') as f:
# lines = f.readlines()
# print(ti.state)
# for num, line in enumerate(lines):
# if 'ERROR' in line:
# return '\n'.join(lines[num - 10: num + 10])
# ''' If ERROR can't be found in the log file, the last 20 lines of the file are returned. '''
# return '\n'.join(lines[-20:])
# except IOError:
# print('Error while reading log file for {}'.format(ti.task_id))

# @provide_session
# def notify_with_log(self, exec_date, session=None):
# upstream_task_instances = (
# session.query(TaskInstance)
# .filter(
# TaskInstance.dag_id == dag.dag_id,
# TaskInstance.execution_date == exec_date
# )
# .all()
# )

# notified = False
# for tis in upstream_task_instances:
# base_log_folder = conf.get('core', 'BASE_LOG_FOLDER')
# filename_template = conf.get('core', 'LOG_FILENAME_TEMPLATE')
# filename_template = parse_template_string(filename_template)[1]
# context = {
# 'ti': tis,
# 'ti.dag_id': tis.dag_id,
# 'ti.task_id': tis.task_id,
# 'execution_date': tis.execution_date,
# 'try_number': tis.next_try_number - 1
# }

# filepath = '{}/{}'.format(base_log_folder, filename_template.render(**context))
# if tis.state == 'failed':
# log_trail = self.get_log_trail(filepath, tis)
# log_url = tis.log_url

# return log_url, log_trail

# def execute(self, context):
# """ Send message to SQS queue and return message ID. """
# exec_date = context.get('execution_date')
# log_url, log_trail = self.notify_with_log(exec_date=exec_date)

# messageId = send_message_1(
# queue_url = self.endpoint,
# type = self.message_type,
# payload = {"text": "HR data transfered to application completed." +
log_url,
# "message_params": {
# "tenant": "tenant".upper(),
# "description": {
# "files/db_asd.sql": 3,
# "laudio-database/db_etl.sql": 41,
# "laudio-airflow/db.sql": 2,
# },
# },
# "color": "#0AFFA0"
# },
# meta = self.meta,
# region_name = self.region
# )

# return messageId

# msg = SQSOperatortest(
# # endpoint="https://sqs.us-east-1.amazonaws.com/028093511255/notification-queue",
# endpoint="https://sqs.us-east-1.amazonaws.com/028093511255/trial-queue",
# message_type="hr-data-transfer",
# payload={"text": "HR data transfered to application completed.",
# "message_params": {
# "tenant": "tenant".upper(),
# "description": {
# "files/db_asd.sql": 3,
# "laudio-database/db_etl.sql": 41,
# "laudio-airflow/db.sql": 2,
# },
# },
# "color": "#0AFFA0"
# },

# # AWS_ACCESS_KEY_ID="AKIAQNCUA6ZLX57ZKLNB",
# # AWS_SECRET_ACCESS_KEY="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",

# task_id="airflow_sqs",
# trigger_rule=TriggerRule.ONE_FAILED,
# dag=dag
# )
# def demo_method(**kwargs):
# # time.sleep(30)
# print("hello world")
# raise Exception("error occurred")

# test_first = PythonOperator(
# python_callable=demo_method,
# op_kwargs={
# 'text': "message"
# },
# task_id='notify_failure',
# dag=dag
# )
# test_first >> msg
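# # NOTE: demo_method always raises, so 'notify_failure' fails on every run and, via TriggerRule.ONE_FAILED, triggers the 'airflow_sqs' task to send the SQS notification.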

# def get_log_trail(fname, ti):
# """ Return small log part containing the ERROR message. """
# try:
# with open(fname, "r") as f:
# lines = f.readlines()
# print(ti.state)
# for num, line in enumerate(lines):
# if "ERROR" in line:
# return "\n".join(lines[num - 10 : num + 10])
# """ If ERROR can't be found in the log file, then the last 20 lines of
the file is returned. """
# return "\n".join(lines[-20:])
# except IOError:
# logging.info("Error while reading log file for {}".format(ti.task_id))

# @provide_session
# def extract_log(dag, exec_date, session=None):
# """ Return the log URL and log trail of the failed task. """
# upstream_task_instances = (
# session.query(TaskInstance)
# .filter(
# TaskInstance.dag_id == dag.dag_id, TaskInstance.execution_date == exec_date
# )
# .all()
# )
# notified = False
# for tis in upstream_task_instances:
# base_log_folder = conf.get("core", "BASE_LOG_FOLDER")
# filename_template = conf.get("core", "LOG_FILENAME_TEMPLATE")
# filename_template = parse_template_string(filename_template)[1]
# context = {
# "ti": tis,
# "ti.dag_id": tis.dag_id,
# "ti.task_id": tis.task_id,
# "execution_date": tis.execution_date,
# "try_number": tis.next_try_number - 1,
# }
# filepath = "{}/{}".format(base_log_folder,
filename_template.render(**context))
# if tis.state == "failed":
# log_trail = get_log_trail(filepath, tis)
# log_url = tis.log_url

# return log_url, log_trail
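# # NOTE: extract_log scans every task instance of this DAG run and returns the log URL and log trail of the failed one; the 'notified' flag is assigned but never used.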

# class SQSOperator(BaseOperator):
# """
# Operator to send a message (payload) to Amazon SQS Queue.

# :param endpoint: QueueUrl for the message
# :type endpoint: string
# :param message_type: Type of message
# :type message_type: string
# :param payload: Message contents
# :type payload: dict
# :param meta: Meta data
# :type meta: dict
# :param region: Queue's region name
# :type region: string
# """

# @apply_defaults
# def __init__(
# self,
# endpoint: str,
# message_type: str,
# payload={},
# meta = {},
# region=AWS_DEFAULT_REGION,
# *args,
# **kwargs
# ):
# """ Calls BaseOperators' constructor and set passed variables(or
defaults) for operator """

# super(SQSOperator, self).__init__(*args, **kwargs)


# self.endpoint = endpoint
# self.message_type = message_type
# self.payload = payload
# self.meta = meta
# self.region = region
# self.dag = kwargs.get("dag")
# self.task_id = kwargs.get("task_id")

# def execute(self, context):
# """ Send message to SQS queue and return message ID. """

# if self.payload.get("status", "success") == "failed":
# exec_date = context.get('execution_date')
# log_url, log_trail = extract_log(dag=self.dag, exec_date=exec_date)

# meta = {
# **self.meta,
# "task_id": self.task_id,
# "dag_id": self.dag.dag_id,
# "log_url": log_url,
# "log_trail": log_trial
# }
# else:
# meta = {**self.meta}
# logging.info("Sent message to SQS queue {}".format(self.endpoint))

# messageId = send_message_1(
# queue_url=self.endpoint,
# type=self.message_type,
# payload=self.payload,
# meta=meta,
# region=self.region,
# )
# return messageId
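# # NOTE: the log URL and log trail are attached to meta only when the incoming payload marks the run as failed; otherwise the payload and meta are sent unchanged.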

# def test_SQSOperator():
# """ Test SQSOperator for return value """

# dag = DAG(
# "airflow_sqs_dag",
# description="DAG testing sqs airflow",
# schedule_interval="* * * * *",
# start_date=dt.datetime(2020, 3, 21),
# catchup=False,
# )
# test_response = {"MessageId": 13}
# test_endpoint = "https://sqs.us-east-1.amazonaws.com/0/test"
# test_message_type = "airflow"
# test_payload = {"test": "Airflow test"}

# with patch("test_sqs.get_sqs_client") as get_sqs_client:
# get_sqs_client().send_message.return_value = test_response
# sqs_task = SQSOperator(
# endpoint=test_endpoint,
# message_type=test_message_type,
# payload=test_payload,
# task_id="airflow_sqs",
# dag=dag,
# )
# message_id = sqs_task.execute(dag)

# assert message_id == test_response["MessageId"]


# get_sqs_client().send_message.assert_called_once_with(
# MessageBody = '{"type": "' + test_message_type + '", "payload": ' + json.dumps(test_payload) + ', "meta": {}}',
# QueueUrl = test_endpoint
# )
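# The only statements that actually run in this file are below: a small smoke test that calls send_message from the internal laudio.util.sqs helper, whose signature appears to mirror send_message_1 above.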
from laudio.util import sqs

def test_lambda():
sqs.send_message(
queue_url="https://sqs.us-east-1.amazonaws.com/028093511255/trial-queue",
type="HR_DATA_TRANSFER_SUCCESS",
payload={"text": "HR data transfered to application failed.",
"status": "failed",
},
meta={
"tenant": "REX"
},
aws_access_key_id = "AKIAQNCUA6ZLX57ZKLNB",
aws_secret_access_key ="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",
)
print("hello")
print("lol")
test_lambda()
