Using Python Fabric for Remote Operations and Deployment

Environment Setup
Install the corresponding package on both the local machine and the target machine (note: both sides need it).

sudo easy_install fabric
This currently installs version 1.8 (pip install fabric works just as well).

After installation, check that it succeeded:
[ken@~$] which fab
/usr/local/bin/fab
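
As an extra sanity check, fab can report its own version; the exact output depends on the installed release, so treat this as illustrative:

[ken@~$] fab --version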
Once installed, it is worth browsing the official documentation.

Running Local Commands
from fabric.api import local

def lsfab():
    # note: each local() call runs in its own shell, so this cd does not
    # carry over to the following ls
    local('cd ~/tmp/fab')
    local('ls')

Result:

[ken@~/tmp/fab$] pwd;ls
/Users/ken/tmp/fab
fabfile.py   fabfile.pyc  test.py      test.pyc
[ken@~/tmp/fab$] fab -f test.py lsfab
[localhost] local: cd ~/tmp/fab
[localhost] local: ls
fabfile.py  fabfile.pyc test.py     test.pyc

Done.
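
Because each local() call spawns its own shell, the cd above does not affect the ls. A minimal sketch of the usual fix, using Fabric's lcd context manager so the directory change applies to every local command inside the block:

from fabric.api import lcd, local

def lsfab():
    # lcd() prefixes each local() call inside the block with a cd
    with lcd('~/tmp/fab'):
        local('ls')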


Remote Operations
Now suppose you want to go to the project directory /home/ken/project on machine A and update the configuration files there.
#!/usr/bin/env python
# encoding: utf-8
from fabric.api import local, cd, run, env

env.hosts = ['user@ip:port', ]  # SSH connection parameters
env.password = 'pwd'

def setting_ci():
    local('echo "add and commit settings in local"')
    # put the local steps from the previous section here

def update_setting_remote():
    print("remote update")
    with cd('~/temp'):        # cd enters a directory on the remote host
        run('ls -l | wc -l')  # use run for remote commands

def update():
    setting_ci()
    update_setting_remote()
Then run it:

[ken@~/tmp/fab$] fab -f deploy.py update
[user@ip:port] Executing task 'update'
[localhost] local: echo "add and commit settings in local"
add and commit settings in local
remote update
[user@ip:port] run: ls -l | wc -l
[user@ip:port] out: 12
[user@ip:port] out:
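
The task above only lists the remote directory; to actually move configuration files around, Fabric's put and get transfer files over the same connection. A sketch under assumed paths (settings.conf and /home/ken/project are placeholders):

from fabric.api import env, put, get

env.hosts = ['user@ip:port']
env.password = 'pwd'

def push_setting():
    # upload the local settings file into the project directory on the remote host
    put('settings.conf', '/home/ken/project/settings.conf')

def fetch_setting():
    # download the remote copy for local inspection
    get('/home/ken/project/settings.conf', 'settings.conf.remote')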
Note: if env.password is not set, Fabric will prompt interactively for a password when it reaches each host.
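
If you would rather not keep passwords in the fabfile, Fabric can authenticate with an SSH key instead; a minimal sketch, assuming your private key lives at ~/.ssh/id_rsa:

from fabric.api import env, run

env.hosts = ['user@ip:port']
env.key_filename = '~/.ssh/id_rsa'   # path to the private key (a list of paths also works)

def remote_whoami():
    run('whoami')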

Setting Remote Hosts
from fabric.api import run, env

env.user = 'root'
env.hosts = ['host1', 'jazywoo@host2']  # global host list
env.password = '123456'
env.passwords = {                       # per-host passwords
    'host1': '123456',
    'host2': '123456789',
}

def taskA():
    run('ls')

def taskB():
    run('whoami')

Execution result:

taskA executed on host1
taskA executed on host2
taskB executed on host1
taskB executed on host2
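
Note that for per-host passwords, Fabric matches env.passwords entries against fully-specified host strings (user@host:port), with string values; a sketch with an assumed user and port:

from fabric.api import env, run

env.hosts = ['deploy@host1:22', 'deploy@host2:22']
env.passwords = {
    'deploy@host1:22': '123456',
    'deploy@host2:22': '123456789',
}

def whoami():
    run('whoami')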

Hosts can also be grouped into roles via env.roledefs:

from fabric.api import env
env.roledefs['webservers'] = ['www1', 'www2', 'www3']
from fabric.api import env
env.roledefs = {
    'web': ['www1', 'www2', 'www3'],
    'dns': ['ns1', 'ns2'],
}
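
Once roles are defined, a task can be pointed at them from the command line with fab's -R/--roles flag (mytask here stands for any task, such as the one defined below):

$ fab -R web mytask            # run mytask on every host in the 'web' role
$ fab --roles=web,dns mytask   # several roles, comma-separated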
from fabric.api import env, run

def set_hosts():          # set hosts from inside a task
    env.hosts = ['host1', 'host2']

def mytask():
    run('ls /var/www')
$ fab -H host1,host2 mytask           # set hosts on the command line
$ fab mytask:hosts="host1;host2"      # set hosts for a single task
from fabric.api import env, run

env.hosts.extend(['host3', 'host4'])  # merged with any hosts given on the command line

def mytask():
    run('ls /var/www')
from fabric.api import hosts, run

@hosts('host1', 'host2')   # use the hosts decorator to specify hosts
def mytask():
    run('ls /var/www')

my_hosts = ('host1', 'host2')

@hosts(my_hosts)           # the decorator also accepts an iterable
def mytask():
    # ...
    pass
from fabric.api import env, hosts, roles, run

env.roledefs = {'role1': ['b', 'c']}

@hosts('a', 'b')
@roles('role1')   # hosts and roles combine; mytask runs on the union: a, b, c
def mytask():
    run('ls /var/www')



Executing Tasks with execute and runs_once


from fabric.api import env, run, roles, execute

env.roledefs = {
    'db': ['db1', 'db2'],
    'web': ['web1', 'web2', 'web3'],
}

@roles('db')
def migrate():
    # Database stuff here.
    pass

@roles('web')
def update():
    # Code updates here.
    pass

def deploy():
    execute(migrate)   # execute runs a task on its hosts; calling execute again runs it again,
    execute(update)    # while decorating a task with @runs_once guarantees it runs only once

execute also collects per-host results:
from fabric.api import task, execute, run, runs_once

@task
def workhorse():
    return run("get my infos")

@task
@runs_once
def go():
    results = execute(workhorse)   # a dict mapping each host string to workhorse's return value
    print(results)
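
Since execute() returns a dict keyed by host string, the results can be post-processed in the calling task; a small sketch with an assumed disk-usage task:

from fabric.api import task, execute, run, runs_once

@task
def disk_usage():
    return run('df -h /')

@task
@runs_once
def report():
    results = execute(disk_usage)
    # print each host's output next to its host string
    for host, output in results.items():
        print("%s -> %s" % (host, output))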



Using the task Decorator

from fabric.api import run, task

@task(alias='dwm')   # give the task an alias
def deploy_with_migrations():
    pass

@task
def mytask():
    run("a command")

With this, fab --list on the command line shows the task list. You can also use the Task class to encapsulate all the operations:
from fabric.api import run, sudo, task
from fabric.tasks import Task

class MyTask(Task):
    name = "deploy"
    def run(self, environment, domain="whatever.com"):
        run("git clone foo")
        sudo("service apache2 restart")

instance = MyTask()

============ equivalent to ============

@task
def deploy(environment, domain="whatever.com"):
    run("git clone foo")
    sudo("service apache2 restart")
from fabric.api import task
from fabric.tasks import Task

class CustomTask(Task):
    def __init__(self, func, myarg, *args, **kwargs):
        super(CustomTask, self).__init__(*args, **kwargs)
        self.func = func
        self.myarg = myarg

    def run(self, *args, **kwargs):
        return self.func(*args, **kwargs)

@task(task_class=CustomTask, myarg='value', alias='at')
def actual_task():
    pass
When the fabfile is loaded, this is effectively what gets executed:

task_obj = CustomTask(actual_task, myarg='value')

Parallel Tasks
By default, the tasks above are executed serially:
from fabric.api import *

def update():
    with cd("/srv/django/myapp"):
        run("git pull")

def reload():
    sudo("service apache2 reload")
Running $ fab -H web1,web2,web3 update reload in the terminal gives:

1. update on web1
2. update on web2
3. update on web3
4. reload on web1
5. reload on web2
6. reload on web3

With parallel execution, the desired result is instead:

1. update on web1, web2, and web3
2. reload on web1, web2, and web3
Parallel execution cuts down the waiting time:

from fabric.api import *

@parallel            # use the parallel decorator
def runs_in_parallel():
    pass

def runs_serially():
    pass
Running $ fab -H host1,host2,host3 runs_in_parallel runs_serially gives:

1. runs_in_parallel on host1, host2, and host3
2. runs_serially on host1
3. runs_serially on host2
4. runs_serially on host3

Alternatively, pass the -P flag when invoking fab:

$ fab -H host1,host2,host3 -P runs_in_parallel runs_serially
You can also limit the degree of parallelism:

from fabric.api import *

@parallel(pool_size=5)   # at most 5 hosts at a time
def heavy_task():
    # lots of heavy local lifting or lots of IO here
    pass

$ fab -P -z 5 heavy_task   # or set the pool size on the command line
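
The reverse is also possible: a task can be pinned to serial execution even when fab is invoked with -P (a sketch, assuming Fabric 1.x's serial decorator):

from fabric.api import parallel, serial, run

@parallel
def runs_in_parallel():
    run('uptime')

@serial            # stays serial even under fab -P
def runs_serially():
    run('uptime')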

Closing Remote Connections

If the connections are not closed, the Python process keeps waiting on them and never exits:
from fabric.state import connections

for key in connections.keys():
    connections[key].close()
    del connections[key]
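
Fabric 1.x also ships a helper that performs the same cleanup, which saves poking at fabric.state directly (a sketch; disconnect_all lives in fabric.network):

from fabric.network import disconnect_all

def cleanup():
    disconnect_all()   # closes every cached connection so the process can exit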

Example

# Fabric's operation commands
from fabric.api import abort, cd, env, get, hide, hosts, local, prompt, \
    put, require, roles, run, runs_once, settings, show, sudo, warn
# (or simply: from fabric.api import *)

@hosts('host1')
def clean_and_upload():
    local(r'find assets/ -name "*.DS_Store" -exec rm {} \;')
    local('tar czf /tmp/assets.tgz assets/')
    put('/tmp/assets.tgz', '/tmp/assets.tgz')
    with cd('/var/www/myapp/'):
        run('tar xzf /tmp/assets.tgz')
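
Assuming this lives in a file named deploy.py (the name is arbitrary), it would be invoked as:

$ fab -f deploy.py clean_and_upload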



