celery 的 集成 以及 ansible api 的调用过程。
django 框架集成了 celery,初衷就是当请求到 view 的时候,需要执行一些耗时程序,需要一个异步的东西来代替执行。
jumpserver 中 出现了大量 这样的源码。
class AssetRefreshHardwareApi(generics.RetrieveAPIView):
    """Kick off an async refresh of one asset's hardware info.

    Responds with the id of the celery task performing the refresh, so
    the caller can poll for the result.
    """
    queryset = Asset.objects.all()
    serializer_class = serializers.AssetSerializer
    permission_classes = (IsSuperUser,)

    def retrieve(self, request, *args, **kwargs):
        # Resolve the asset from the `pk` url kwarg; 404 when absent.
        asset = get_object_or_404(Asset, pk=kwargs.get('pk'))
        # Queue the celery task and return its id to the client.
        async_result = update_asset_hardware_info_manual.delay(asset)
        return Response({"task": async_result.id})
这是一个刷新资产硬件信息的视图类,在 retrieve 方法中,根据 url 中的 pk 参数,拿到 asset 实例后,调用了一个方法
update_asset_hardware_info_manual.delay 方法 ,这个方法是什么,首先来看源码。
@shared_task
def update_asset_hardware_info_manual(asset):
    """Celery entry point: refresh hardware info for a single asset."""
    # Display name recorded for the task (Chinese: "Update asset hardware info").
    task_name = _("更新资产硬件信息")
    return update_assets_hardware_info_util([asset], task_name=task_name)
@shared_task 语法糖(装饰器模式)在干什么?点进源码看一下。这个装饰器的作用就是把 update_asset_hardware_info_manual 函数跟参数 asset 传入到装饰器里,具体实现看源码。
def shared_task(*args, **kwargs):
    """Create shared tasks (decorator).

    Will return a proxy that always takes the task from the current apps
    task registry.

    This can be used by library authors to create tasks that will work
    for any app environment.

    Example:
        >>> from celery import Celery, shared_task
        >>> @shared_task
        ... def add(x, y):
        ...     return x + y
        >>> app1 = Celery(broker='amqp://')
        >>> add.app is app1
        True
        >>> app2 = Celery(broker='redis://')
        >>> add.app is app2
        True
    """
    def create_shared_task(**options):
        def __inner(fun):
            name = options.get('name')
            # Set as shared task so that unfinalized apps,
            # and future apps will load the task.
            connect_on_app_finalize(
                lambda app: app._task_from_fun(fun, **options)
            )

            # Force all finalized apps to take this task as well.
            for app in _get_active_apps():
                if app.finalized:
                    with app._finalize_mutex:
                        app._task_from_fun(fun, **options)

            # Return a proxy that always gets the task from the current
            # apps task registry.
            def task_by_cons():
                app = current_app()
                return app.tasks[
                    name or gen_task_name(app, fun.__name__, fun.__module__)
                ]
            return Proxy(task_by_cons)
        return __inner

    # FIX: the pasted snippet dropped celery's dispatch tail, which made the
    # decorator always return None (so `@shared_task` would rebind the
    # function to None). Restore it: bare `@shared_task` receives the
    # function directly; `@shared_task(...)` receives options first and
    # returns the real decorator.
    if len(args) == 1 and callable(args[0]):
        return create_shared_task(**kwargs)(args[0])
    return create_shared_task(*args, **kwargs)
关于 celery 的任务从生成到执行的过程又是一个大坑,暂时不去管它。通过 shared_task 生成了新的任务,新的任务又是根据被装饰的函数生成的。下面贴出这个函数的详细信息。
@shared_task
def update_assets_hardware_info_util(assets, task_name=None):
    """
    Using ansible api to update asset hardware info
    :param assets: asset seq
    :param task_name: task_name running
    :return: result summary ['contacted': {}, 'dark': {}]
    """
    from ops.utils import update_or_create_ansible_task

    if task_name is None:
        # Default display name (Chinese: "Update asset hardware info").
        task_name = _("更新资产硬件信息")

    ansible_tasks = const.UPDATE_ASSETS_HARDWARE_TASKS
    # Only active, unix-like assets can be probed by ansible's setup module.
    hostname_list = [
        asset.hostname
        for asset in assets
        if asset.is_active and asset.is_unixlike()
    ]
    if not hostname_list:
        logger.info("Not hosts get, may be asset is not active or not unixlike platform")
        return {}

    task, created = update_or_create_ansible_task(
        task_name, hosts=hostname_list, tasks=ansible_tasks, pattern='all',
        options=const.TASK_OPTIONS, run_as_admin=True, created_by='System',
    )
    result = task.run()
    # Todo: may be somewhere using
    # Manual run callback function
    set_assets_hardware_info(result)
    return result
这个函数接收 asset 实例 作为参数,取出 host name 放进列表,函数中的 tasks 是一个 常量,具体内容为
# Ad-hoc task list handed to the ansible runner: a single step invoking
# ansible's `setup` module, which gathers host facts (hardware info).
UPDATE_ASSETS_HARDWARE_TASKS = [
    {
        'name': "setup",
        'action': {
            'module': 'setup'
        }
    }
]
tasks 常量主要规定了 ansible 要用到的一些模块和命令。
下面 具体的update_or_create_ansible_task 这个函数,
def update_or_create_ansible_task(
        task_name, hosts, tasks,
        interval=None, crontab=None, is_periodic=False,
        callback=None, pattern='all', options=None,
        run_as_admin=False, run_as="", become_info=None,
        created_by=None,
):
    """Persist an ansible Task and (when changed) a new AdHoc payload.

    Returns a ``(task, created)`` pair where ``created`` says whether a
    fresh AdHoc row was saved; returns None when any required argument
    is empty.
    """
    # Guard clause: nothing to do without a name, hosts and tasks.
    if not (hosts and tasks and task_name):
        return

    task_defaults = {
        'name': task_name,
        'interval': interval,
        'crontab': crontab,
        'is_periodic': is_periodic,
        'callback': callback,
        'created_by': created_by,
    }
    task, _unused = Task.objects.update_or_create(
        defaults=task_defaults, name=task_name,
    )

    # Build the candidate AdHoc and only persist it when it differs from
    # the latest one on record (or none exists yet).
    candidate = AdHoc(task=task, pattern=pattern,
                      run_as_admin=run_as_admin,
                      run_as=run_as)
    candidate.hosts = hosts
    candidate.tasks = tasks
    candidate.options = options
    candidate.become = become_info

    created = False
    latest = task.latest_adhoc
    if not latest or latest != candidate:
        print("Task create new adhoc: {}".format(task_name))
        candidate.save()
        task.latest_adhoc = candidate
        created = True
    return task, created
这个函数主要有两个作用:创建了 task 实例跟 adhoc 实例,并把信息存进数据库;函数返回的是 task 对象跟 created 布尔值。接下来会调用 task 的 run 方法,run 方法具体实现如下。
def run(self, record=True):
    """Delegate execution to the most recent AdHoc attached to this task.

    Returns whatever ``AdHoc.run`` returns, or None when the task has
    no AdHoc yet.
    """
    adhoc = self.latest_adhoc
    if not adhoc:
        return None
    return adhoc.run(record=record)
task 的 run 方法就是调用的 latest_adhoc 的 run 方法,而 adhoc 的 run 方法又是调用的 AdHocRunner 的 run 方法,具体源码贴出如下。
def run(self, tasks, pattern, play_name='Ansible Ad-hoc', gather_facts='no', file_obj=None):
    """Build and execute a one-off ansible play against the inventory.

    :param tasks: [{'action': {'module': 'shell', 'args': 'ls'}, ...}, ]
    :param pattern: all, *, or others
    :param play_name: The play name
    :param gather_facts: 'yes'/'no' flag copied straight into the play source
    :param file_obj: logging to file_obj
    :return: the results callback object collecting per-host results
    """
    # Validate the host pattern before doing any work.
    self.check_pattern(pattern)
    self.results_callback = self.get_result_callback(file_obj)
    cleaned_tasks = self.clean_tasks(tasks)

    # In-memory play definition — equivalent to one playbook entry.
    play_source = dict(
        name=play_name,
        hosts=pattern,
        gather_facts=gather_facts,
        tasks=cleaned_tasks
    )
    play = Play().load(
        play_source,
        variable_manager=self.variable_manager,
        loader=self.loader,
    )

    # TaskQueueManager drives execution across hosts; per-host results are
    # streamed into results_callback via stdout_callback.
    tqm = TaskQueueManager(
        inventory=self.inventory,
        variable_manager=self.variable_manager,
        loader=self.loader,
        options=self.options,
        stdout_callback=self.results_callback,
        passwords=self.options.passwords,
    )
    print("Get matched hosts: {}".format(
        self.inventory.get_matched_hosts(pattern)
    ))

    try:
        tqm.run(play)
        return self.results_callback
    except Exception as e:
        # Re-wrap any failure as AnsibleError so callers catch one type.
        raise AnsibleError(e)
    finally:
        # Always release worker processes and temp files, even on failure.
        tqm.cleanup()
        self.loader.cleanup_all_tmp_files()
这个函数主要是实例化了一个 play 对象,而 play 是 ansible 实现批量化部署的方式。接下来看对 ansible 的探索。