[REV] remove python code from rbd-builder

qingguo 2018-03-24 16:16:47 +08:00
parent dfff8f7eac
commit 9e5110594d
504 changed files with 15 additions and 150317 deletions


@@ -2,45 +2,16 @@ FROM goodrainapps/alpine:3.4
MAINTAINER zengqg@goodrain.com
RUN apk --no-cache add gcc musl-dev openssl python python-dev py-crypto \
git openssh-client coreutils perl sudo
ENV PKG_URL="http://goodrain-pkg.oss-cn-shanghai.aliyuncs.com/pkg"
RUN curl $PKG_URL/labor_docker.tar.gz | tar -xzC /usr/bin/ \
&& curl $PKG_URL/labor_libzmq.tar.gz | tar -xzC /usr/local/ \
&& adduser -u 200 -D -S rain \
&& echo 'rain ALL = (root) NOPASSWD: ALL' > /etc/sudoers.d/rain \
&& curl https://bootstrap.pypa.io/get-pip.py | python -
RUN apk --no-cache add openssl git openssh-client perl sudo
ADD rainbond-chaos /run/rainbond-chaos
ADD entrypoint.sh /run/entrypoint.sh
ADD plugins /run/plugins
ADD build.pl /run/build.pl
ENV REGION_TAG labor
ENV WORK_DIR /run
ENV GR_PIP_VERSION 4
ADD requirements.txt $WORK_DIR/requirements.txt
#RUN pip install pyzmq==16.0.2 --install-option --zmq=/usr/local/libzmq
RUN pip install -i https://pypi.tuna.tsinghua.edu.cn/simple pyzmq==16.0.2 --install-option --zmq=/usr/local/libzmq
RUN pip install -r $WORK_DIR/requirements.txt -i https://pypi.doubanio.com/simple
RUN python -c "import compileall;compileall.compile_dir('$WORK_DIR')" \
&& find $WORK_DIR/plugins -name '*.py' -type f -delete
# fix git warning
# ENV HOME=/home/rain
# RUN mkdir /home/rain/.config/git/ -pv && \
# touch /home/rain/.config/git/attributes && \
# chown rain /home/rain -R
ENV RELEASE_DESC=__RELEASE_DESC__
WORKDIR $WORK_DIR
#USER rain
ENTRYPOINT ["/run/entrypoint.sh"]


@@ -1,46 +0,0 @@
FROM goodrainapps/alpine:3.4
MAINTAINER zengqg@goodrain.com
RUN apk --no-cache add gcc musl-dev openssl python python-dev py-crypto \
git openssh-client coreutils perl sudo
ENV PKG_URL="http://goodrain-pkg.oss-cn-shanghai.aliyuncs.com/pkg"
RUN curl $PKG_URL/labor_docker.tar.gz | tar -xzC /usr/bin/ \
&& curl $PKG_URL/labor_libzmq.tar.gz | tar -xzC /usr/local/ \
&& adduser -u 200 -D -S rain \
&& echo 'rain ALL = (root) NOPASSWD: ALL' > /etc/sudoers.d/rain \
&& curl https://bootstrap.pypa.io/get-pip.py | python -
ADD rainbond-chaos /run/rainbond-chaos
ADD entrypoint.sh /run/entrypoint.sh
ADD plugins /run/plugins
ENV REGION_TAG labor
ENV WORK_DIR /run
ENV GR_PIP_VERSION 4
ADD requirements.txt $WORK_DIR/requirements.txt
#RUN pip install pyzmq==16.0.2 --install-option --zmq=/usr/local/libzmq
RUN pip install -i https://pypi.tuna.tsinghua.edu.cn/simple pyzmq==16.0.2 --install-option --zmq=/usr/local/libzmq
RUN pip install -r $WORK_DIR/requirements.txt -i https://pypi.doubanio.com/simple
RUN python -c "import compileall;compileall.compile_dir('$WORK_DIR')" \
&& find $WORK_DIR/plugins -name '*.py' -type f -delete
# fix git warning
ENV HOME=/home/rain
RUN mkdir /home/rain/.config/git/ -pv && \
touch /home/rain/.config/git/attributes && \
chown rain /home/rain -R
ENV RELEASE_DESC=app-detection-detection-370.g1ba2f1d
WORKDIR $WORK_DIR
USER rain
ENTRYPOINT ["/run/entrypoint.sh"]


@@ -1,518 +0,0 @@
# -*- coding: utf8 -*-
import os
import json
from utils.shell import Executer as shell
from clients.region import RegionAPI
from clients.registry import RegistryAPI
from clients.region_api import RegionBackAPI
from clients.etcdcli import TaskLocker
from clients.hubimageutils import HubUtils
from clients.userconsole import UserConsoleAPI
import etc
import time
import logging
import logging.config
from utils.log import EventLog
from etc import settings
from clients.acp_api import ACPAPI
import fileinput
load_dict = {}
with open("plugins/config.json", 'r') as load_f:
load_dict = json.load(load_f)
logging.config.dictConfig(settings.get_logging(load_dict))
logger = logging.getLogger('default')
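# use the docker CLI directly when the socket is writable by this user, otherwise go through sudo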
if os.access("/var/run/docker.sock", os.W_OK):
DOCKER_BIN = "docker"
else:
DOCKER_BIN = "sudo -P docker"
class AppImage():
def __init__(self, job, *args, **kwargs):
self.job = job
self.configs = kwargs.get("config")
self.region_api = RegionAPI(conf=self.configs["region"])
self.region_client = RegionBackAPI()
self.api = ACPAPI(conf=self.configs['region'])
image_config = self.configs["publish"]["image"]
self.region_registry = RegistryAPI(
host=image_config.get('curr_registry'))
self.oss_registry = RegistryAPI(host=image_config.get('all_registry'))
self.region_registry.set_log_topic('mq_work.app_image')
self.oss_registry.set_log_topic('mq_work.app_image')
self.locker = TaskLocker(conf=self.configs['etcd'])
self.namespace = image_config.get('oss_namespace')
self.user_cs_client = UserConsoleAPI(conf=self.configs["userconsole"])
self.hubclient = HubUtils(image_config)
# whether oss.goodrain.me is configured
self.is_region_image = image_config.get('all_region_image')
self.is_oss_image = image_config.get('oss_image')
def do_work(self):
try:
task = json.loads(self.job.body)
self.task = task
if "event_id" in self.task:
self.event_id = task["event_id"]
self.log = EventLog().bind(event_id=self.event_id)
else:
self.event_id = ""
self.log = EventLog().bind(event_id="")
if task['action'] == 'create_new_version':
self.log.info("开始发布升级应用。", step="app-image")
self.create_new_version()
elif task['action'] == 'download_and_deploy':
self.log.info("开始同步和部署应用。", step="app-image")
self.download_and_deploy()
elif task['action'] == 'delete_old_version':
self.log.info("开始删除旧版本应用。", step="app-image")
self.delete_old_version()
except Exception as e:
if self.log:
self.log.error(
"从云市部署应用失败。{}".format(e.message),
step="callback",
status="failure")
logger.exception('mq_work.app_image', e)
def create_new_version(self):
image = self.task['image']
service_key = self.task['service_key']
app_version = self.task['app_version']
oss_image = self.oss_registry.rename_image(image)
dest = self.task['dest']
share_id = self.task.get("share_id", None)
if dest == "yb":
if self.region_registry.exist_image(image):
logger.debug("mq_work.app_image",
"now local exists, oss doesnot exists")
data = {
'service_key': service_key,
'app_version': app_version,
'image': image,
'dest_yb': True,
'dest_ys': False,
'slug': ""
}
if share_id is not None:
data["share_id"] = share_id
try:
self.region_client.service_publish_new_region(data)
except Exception as e:
self.region_client.service_publish_failure_region(data)
self.log.error("云帮应用本地发布失败,保存publish 失败。{0}".format(e.message),step="callback",status="failure")
pass
if self.is_region_image and not self.oss_registry.exist_image(
oss_image):
try:
self.log.info("开始拉取镜像。")
ok = self._pull(image)
if not ok:
self.log.error(
"拉取镜像发生错误,构建退出。",
step="callback",
status="failure")
self.region_client.service_publish_failure_region(data)
return
image_id = self.get_image_property(image, 'Id')
self.log.info("拉取镜像完成。")
self._tag(image_id, oss_image)
self.log.info("镜像更改tag完成。开始上传镜像到云帮")
ok = self._push(oss_image)
if not ok:
self.log.error(
"拉取镜像发生错误,构建退出。",
step="callback",
status="failure")
self.region_client.service_publish_failure_region(data)
return
self.log.info("上传镜像到云帮完成")
# notify the web console
self.user_cs_client.service_publish_success(
json.dumps(data))
try:
self.region_client.service_publish_success_region(data)
except Exception as e:
logger.exception(e)
self.region_client.service_publish_failure_region(data)
pass
self.log.info(
"云帮应用发布完毕", step="last", status="success")
except (shell.ExecException, Exception), e:
logger.exception("mq_work.app_image", e)
logger.error("mq_work.app_image", e)
self.region_client.service_publish_failure_region(data)
self.log.error(
"云帮应用发布失败 {}".format(e.message),
step="callback",
status="failure")
else:
# notify the web console
self.user_cs_client.service_publish_success(
json.dumps(data))
try:
self.region_client.service_publish_success_region(data)
except Exception as e:
self.region_client.service_publish_failure_region(data)
logger.exception(e)
pass
self.log.info("云帮应用发布完毕", step="last", status="success")
else:
self.log.info("镜像不存在,发布失败", step="callback", status="failure")
elif dest == "ys":
# the image exists locally and the cloud-market image datacenter is enabled
if self.region_registry.exist_image(image) and self.is_oss_image:
req = {
'service_key': service_key,
'app_version': app_version,
'image': image,
'slug': "",
'dest_ys': True,
'dest_yb': False
}
if share_id is not None:
req["share_id"] = share_id
try:
self.region_client.service_publish_new_region(req)
except Exception as e:
self.region_client.service_publish_failure_region(req)
self.log.info("云帮应用本地发布失败,保存publish 失败。{0}".format(e.message),step="callback",status="failure")
pass
# self.region_client.service_publish_new_region(req)
self.log.info("开始上传镜像到云市")
# rename the image
hub_image = self.hubclient.rename_image(image)
logger.info("mq_work.app_image",
'hub_image={}'.format(hub_image))
# check whether it already exists
data = self.hubclient.parse_image(image)
logger.info("mq_work.app_image", 'data={}'.format(data))
# check whether the tag exists
tag_exists = self.hubclient.check(data.name, data.tag)
logger.info("mq_work.app_image",
'tag_exists={}'.format(tag_exists))
try:
self.log.info("开始从云帮拉取镜像。")
ok = self._pull(image)
if not ok:
self.log.error(
"拉取镜像发生错误,构建退出。",
step="callback",
status="failure")
self.region_client.service_publish_failure_region(req)
return
image_id = self.get_image_property(image, 'Id')
self.log.info("从云帮拉取镜像完成,更改镜像TAG")
self._tag(image_id, hub_image)
self.log.info("更改镜像TAG完成开始上传镜像到云市")
ok = self._push(hub_image)
if not ok:
self.log.error(
"拉取镜像发生错误,构建退出。",
step="callback",
status="failure")
self.region_client.service_publish_failure_region(req)
return
self.log.info("上传镜像到云市完成。")
# notify the web console
self.user_cs_client.service_publish_success(
json.dumps(req))
try:
self.region_client.service_publish_success_region(req)
except Exception as e:
self.region_client.service_publish_failure_region(req)
logger.exception(e)
pass
self.log.info("云市应用发布完毕", step="last", status="success")
except (shell.ExecException, Exception), e:
logger.exception("mq_work.app_image", e)
logger.error("mq_work.app_image", e)
self.region_client.service_publish_failure_region(req)
self.log.error(
"云市应用发布失败 {}".format(e.message),
step="callback",
status="failure")
else:
self.log.info("镜像不存在,发布失败", step="callback", status="failure")
def download_and_deploy(self):
image = self.task['image']
namespace = self.task['namespace']
tenant_name = self.task['tenant_name']
service_alias = self.task['service_alias']
event_id = self.task['event_id']
oss_image = self.oss_registry.rename_image(image)
region_download = False
try:
if not self.region_registry.exist_image(image):
self.log.debug("image is " + image)
logger.debug("mq_work.app_image",
"now check inner.goodrain.com {0}".format(
self.is_region_image))
self.log.debug("oss_image is " + oss_image)
if self.is_region_image and self.oss_registry.exist_image(
oss_image):
try:
self.log.info("云帮发现镜像,开始从内部获取。", step="app-image")
ok = self._pull(oss_image)
if not ok:
self.log.error(
"拉取镜像发生错误,构建退出。",
step="callback",
status="failure")
return
image_id = self.get_image_property(oss_image, 'Id')
self._tag(image_id, image)
ok = self._push(image)
if not ok:
self.log.error(
"上传镜像发生错误,构建退出。",
step="callback",
status="failure")
return
region_download = True
except (shell.ExecException, Exception), e:
logger.exception("mq_work.app_image", e)
logger.error("mq_work.app_image", e)
self.log.error(
"从云帮镜像仓库拉取镜像失败。" + e.__str__(), step="app-image")
# local platform registry not configured, or the download from it failed: download directly from the cloud market
# downloading from the cloud market is only allowed when its image datacenter option is enabled
if not region_download and self.is_oss_image:
# check whether the image exists on the hub
logger.info("mq_work.app_image",
'download image from hub.goodrain.com')
self.log.info("开始从云市获取镜像。", step="app-image")
# rename the image
hub_image = self.hubclient.rename_image(
image, namespace=namespace)
# logger.info("mq_work.app_image", '===[download]hub_image={}'.format(hub_image))
# check whether it exists
data = self.hubclient.parse_image(image)
hub_exists = self.hubclient.check_image(
data.name, data.tag, namespace=namespace)
# logger.info("mq_work.app_image", '===[download]hub_exists={}'.format(hub_exists))
if hub_exists:
try:
self.log.info("开始拉取镜像。", step="app-image")
ok = self._pull(hub_image)
if not ok:
self.log.error(
"拉取镜像发生错误,构建退出。",
step="callback",
status="failure")
return
self.log.info("拉取镜像完成。", step="app-image")
image_id = self.get_image_property(hub_image, 'Id')
self._tag(image_id, image)
self.log.info("更改镜像TAG完成。", step="app-image")
ok = self._push(image)
if not ok:
self.log.error(
"上传镜像发生错误,构建退出。",
step="callback",
status="failure")
return
self.log.info("上传镜像到本地仓库完成。", step="app-image")
region_download = True
except (shell.ExecException, Exception), e:
logger.exception("mq_work.app_image", e)
self.log.error(
"从云市镜像仓库拉取镜像失败。" + e.__str__(),
step="app-image")
else:
logger.error("image {0} not found, can't continue".
format(hub_image))
self.log.error(
"云市未发现此镜像。{0}".format(hub_image), step="app-image")
else:
self.log.info("本地存在此镜像,无需同步", step="app-image")
region_download = True
except Exception as e:
logger.exception("mq_work.app_image", e)
self.log.error(
"同步镜像发生异常." + e.__str__(), step="app-image", status="failure")
version_status = {
"final_status":"failure",
}
if region_download:
version_body = {
"type": 'image',
"path": image,
"event_id": self.event_id
}
try:
self.region_client.update_version_region(json.dumps(version_body))
except Exception as e:
pass
version_status['final_status']="success"
self.log.info("应用同步完成,开始启动应用。", step="app-image", status="success")
body = {
"deploy_version": self.task['deploy_version'],
"event_id": self.event_id
}
try:
# self.api.start_service(tenant_name, service_alias, event_id)
self.api.upgrade_service(self.task['tenant_name'], self.task['service_alias'], json.dumps(body))
except Exception as e:
logger.exception(e)
self.log.error(
"应用自动启动失败。请手动启动", step="callback", status="failure")
else:
self.log.error("应用同步失败。", step="callback", status="failure")
try:
self.region_client.update_version_event(self.event_id,json.dumps(version_status))
except Exception as e:
self.log.error(
"更新version信息失败", step="app-image")
pass
def queryServiceStatus(self, service_id):
try:
res, body = self.region_api.is_service_running(service_id)
logger.info(
'mq_work.app_image',
"service_id=" + service_id + ";body=" + json.dumps(body))
status = body.get(service_id, "closed")
if status == "running":
self.log.debug("依赖的应用状态已经为运行中。", step="worker")
return True
except:
pass
self.log.debug("依赖的应用状态不是运行中,本应用稍后启动。", step="worker")
return False
def delete_old_version(self):
pass
def delete_oss_images(self, images):
for image in images:
deleted = self.oss_registry.delete_image(image)
logger.info("mq_work.app_image", "delete image {0} {1}".format(
image, deleted))
def get_image_property(self, image, name):
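# build a Go-template selector such as '{{.Id}}' and read it with docker inspect -f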
query_format = '{{.%s}}' % name
try:
output = shell.call("{2} inspect -f '{0}' {1}".format(
query_format, image, DOCKER_BIN))
if output == '<no value>':
return None
else:
return output[0].rstrip('\n')
except shell.ExecException, e:
logger.exception("mq_work.app_image", e)
return None
def update_publish_event(self, **kwargs):
body = json.dumps(kwargs)
try:
self.region_api.update_event(body)
except Exception, e:
logger.exception("mq_work.app_image", e)
def _pull(self, image):
cmd = "{} pull {}".format(DOCKER_BIN, image)
logger.info("mq_work.app_image", cmd)
retry = 2
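# stream the docker pull output into the event log; retry on failure (two attempts in total)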
while retry:
try:
p = shell.start(cmd)
while p.is_running():
line = p.readline()
self.log.debug(
line.rstrip('\n').lstrip('\x1b[1G'), step="pull-image")
for line in p.unread_lines:
self.log.debug(line, step="pull-image")
if p.exit_with_err():
self.log.error(
"拉取镜像失败。" + ("开始进行重试." if retry > 0 else ""),
step="pull-image",
status="failure")
retry -= 1
continue
return True
except shell.ExecException, e:
self.log.error("下载镜像发生错误。{0}" + ("开始进行重试." if retry > 0 else
"").format(e.message))
logger.error(e)
retry -= 1
return False
def _push(self, image):
cmd = "{} push {}".format(DOCKER_BIN, image)
logger.info("mq_work.app_image", cmd)
retry = 2
while retry:
try:
p = shell.start(cmd)
while p.is_running():
line = p.readline()
self.log.debug(
line.rstrip('\n').lstrip('\x1b[1G'), step="push-image")
for line in p.unread_lines:
self.log.debug(line, step="push-image")
if p.exit_with_err():
self.log.error(
"上传镜像失败。" + ("开始进行重试." if retry > 0 else ""),
step="push-image",
status="failure")
retry -= 1
continue
return True
except shell.ExecException, e:
self.log.error("上传镜像发生错误。{0}" + ("开始进行重试." if retry > 0 else
"").format(e.message))
logger.error(e)
retry -= 1
return False
def _tag(self, image_id, image):
cmd = "{2} tag {0} {1}".format(image_id, image, DOCKER_BIN)
logger.info("mq_work.app_image", cmd)
shell.call(cmd)
def splitChild(self, childs):
data = []
for lock_event_id in childs:
data.append(lock_event_id.split("/")[-1])
return data
def main():
body = ""
for line in fileinput.input(): # read task from stdin
body = line
app_image = AppImage(job=Job(body=body), config=load_dict)
app_image.do_work()
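# minimal job wrapper: do_work() only needs the raw task body read from stdin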
class Job():
body = ""
def __init__(self, body, *args, **kwargs):
self.body = body
def get_body(self):
return self.body
def get_task(self):
task = json.loads(self.body)
return task
if __name__ == '__main__':
main()


@@ -1,590 +0,0 @@
# -*- coding: utf8 -*-
import os
import json
import shutil
import time
from clients.region import RegionAPI
from clients.alioss import OssAPI
from clients.etcdcli import TaskLocker
from clients.userconsole import UserConsoleAPI
from clients.region_api import RegionBackAPI
from clients.acp_api import ACPAPI
from clients.ftputils import FTPUtils
from utils.crypt import get_md5
from utils.log import EventLog
import logging
import logging.config
from etc import settings
import fileinput
load_dict = {}
with open("plugins/config.json", 'r') as load_f:
load_dict = json.load(load_f)
logging.config.dictConfig(settings.get_logging(load_dict))
logger = logging.getLogger('default')
class AppSlug():
def __init__(self, job, *args, **kwargs):
self.job = job
self.configs = kwargs.get("config")
self.region_api = RegionAPI(conf=self.configs['region'])
self.oss_api = OssAPI(conf=self.configs['oss']['ali_shanghai'])
self.locker = TaskLocker(conf=self.configs['etcd'])
self.user_cs_client = UserConsoleAPI(conf=self.configs['userconsole'])
self.api = ACPAPI(conf=self.configs['region'])
self.region_client = RegionBackAPI()
self.slug_configs = self.configs["publish"]["slug"]
self.is_region_slug = self.slug_configs.get('all_region_ftp')
self.is_oss_ftp = self.slug_configs.get('oss_ftp')
# path where the user's slug files are stored
self.SRV_SLUG_BASE_DIR = self.slug_configs.get(
'slug_path') + '{tenantId}/slug/{serviceId}/{deployVersion}.tgz'
# slug storage path inside this datacenter
self.SLUG_PATH = self.slug_configs.get(
'curr_region_dir') + '{serviceKey}/{appVersion}.tgz'
self.CURR_REGION_PATH = self.slug_configs.get(
'curr_region_path') + self.SLUG_PATH
# ftp settings for the region-level slug store
self.ALL_REGION_FTP_HOST = self.slug_configs.get('all_region_ftp_host')
self.ALL_REGION_FTP_PORT = self.slug_configs.get('all_region_ftp_port')
self.ALL_REGION_FTP_USERNAME = self.slug_configs.get(
'all_region_username')
self.ALL_REGION_FTP_PASSWORD = self.slug_configs.get(
'all_region_password')
self.ALL_REGION_FTP_NAMESPACE = self.slug_configs.get(
'all_region_namespace')
self.ALL_REGION_FTP_PATH = self.ALL_REGION_FTP_NAMESPACE + '{serviceKey}/{appVersion}.tgz'
# oss storage path
CLOUD_ASSISTANT = self.configs.get('CLOUD_ASSISTANT')
self.OSS_BUCKET = self.slug_configs.get('oss_bucket', "")
self.OSS_OBJECT_NAME = CLOUD_ASSISTANT + '/{serviceKey}/{appVersion}.tgz'
logger.debug("mq_work.app_slug", 'init app slug')
def do_work(self):
try:
logger.debug("mq_work.app_slug",
'get task....{}'.format(self.job.body))
task = json.loads(self.job.body)
self.task = task
if "event_id" in self.task:
self.event_id = task["event_id"]
self.log = EventLog().bind(
event_id=self.event_id, step="image_manual")
else:
self.event_id = ""
self.log = EventLog().bind(event_id="", step="image_manual")
if task['action'] == 'create_new_version':
self.log.info("开始分享新版本应用。")
self.create_new_version()
elif task['action'] == 'download_and_deploy':
self.log.info("开始同步应用。")
self.download_and_deploy()
elif task['action'] == 'delete_old_version':
self.log.info("开始删除旧版本应用。")
self.delete_old_version()
except Exception as e:
logger.exception('mq_work.app_slug', e)
def _upload_ftp(self, service_key, app_version, md5file):
""" 上传文件到ftp """
utils = FTPUtils(
host=self.ALL_REGION_FTP_HOST,
username=self.ALL_REGION_FTP_USERNAME,
password=self.ALL_REGION_FTP_PASSWORD,
namespace=self.ALL_REGION_FTP_NAMESPACE,
port=self.ALL_REGION_FTP_PORT)
# check whether the path for this service_key exists; create it if missing
service_dir = self.ALL_REGION_FTP_NAMESPACE + service_key
logger.debug("mq_work.app_slug",
'slug task is {}'.format(self.task))
logger.debug("mq_work.app_slug",
'*******upload dir is {}'.format(service_dir))
utils.check_dir(service_dir)
# upload the slug file
curr_region_slug = self.CURR_REGION_PATH.format(
serviceKey=service_key, appVersion=app_version)
logger.debug("mq_work.app_slug",
'*******upload file path is {}'.format(curr_region_slug))
utils.upload(service_dir, curr_region_slug)
# upload the md5 file
if md5file:
utils.upload(service_dir, md5file)
return True
def _create_md5(self, md5string, dest_slug_file):
try:
md5file = dest_slug_file + ".md5"
f = open(md5file, "w")
f.write(md5string)
f.close()
return md5file
except Exception as e:
logger.error("mq_work.app_slug", "sum file md5 filed!")
logger.exception("mq_work.app_slug", e)
return None
def _check_md5(self, md5string, md5file):
try:
f = open(md5file)
new_md5 = f.readline()
return md5string == new_md5
except Exception as e:
logger.error("mq_work.app_slug", "check md5 filed!")
logger.exception("mq_work.app_slug", e)
return False
def create_new_version(self):
service_key = self.task['service_key']
app_version = self.task['app_version']
service_id = self.task['service_id']
deploy_version = self.task['deploy_version']
tenant_id = self.task['tenant_id']
dest = self.task['dest']
share_id = self.task.get('share_id', None)
# path of the source slug file inside this datacenter
source_slug_file = self.SRV_SLUG_BASE_DIR.format(
tenantId=tenant_id,
serviceId=service_id,
deployVersion=deploy_version)
self.log.debug("数据中心文件路径{0}".format(source_slug_file))
# target file name inside the current datacenter
dest_slug_file = self.CURR_REGION_PATH.format(
serviceKey=service_key, appVersion=app_version)
self.log.debug('当前数据中心文件名称{0}'.format(dest_slug_file))
# make sure the target directory exists
curr_region_dir = os.path.dirname(dest_slug_file)
if not os.path.exists(curr_region_dir):
os.makedirs(curr_region_dir)
# copy the file
self.log.debug(
"开始复制文件 file {0} to {1}".format(source_slug_file, dest_slug_file))
shutil.copyfile(source_slug_file, dest_slug_file)
# compute the md5
md5string = get_md5(source_slug_file)
# write the md5 file
md5file = self._create_md5(md5string, dest_slug_file)
if md5file is None:
self.log.error("md5文件没有生成。")
# region-level object storage, accessed over ftp
slug = self.SLUG_PATH.format(
serviceKey=service_key, appVersion=app_version)
if dest == "yb":
data = {
'service_key': service_key,
'app_version': app_version,
'slug': slug,
'image': "",
'dest_yb': True,
'dest_ys': False,
}
if share_id is not None:
data['share_id'] = share_id
try:
self.region_client.service_publish_new_region(data)
except Exception as e:
self.region_client.service_publish_failure_region(data)
self.log.error(
"云帮应用本地发布失败,保存publish 失败。{0}".format(e.message),
step="callback",
status="failure")
pass
if self.is_region_slug:
try:
self.log.info("开始上传应用到本地云帮")
self._upload_ftp(service_key, app_version, md5file)
logger.debug("mq_work.app_slug",
"*******ftp upload success!")
# self.update_publish_event(event_id=event_id, status='end', desc=u"云帮应用本地发布完毕")
self.user_cs_client.service_publish_success(
json.dumps(data))
try:
self.region_client.service_publish_success_region(data)
except Exception as e:
self.region_client.service_publish_failure_region(data)
logger.exception(e)
pass
self.log.info("云帮应用本地发布完毕", step="last", status="success")
except Exception as e:
logger.error("mq_work.app_slug",
"*******ftp upload failed")
logger.exception("mq_work.app_slug", e)
self.region_client.service_publish_failure_region(data)
self.log.info(
"云帮应用本地发布失败。{}".format(e.message),
step="callback",
status="failure")
else:
self.user_cs_client.service_publish_success(json.dumps(data))
try:
self.region_client.service_publish_success_region(data)
except Exception as e:
self.region_client.service_publish_failure_region(data)
logger.exception(e)
pass
self.log.info("云帮应用本地发布完毕", step="last", status="success")
elif dest == "ys":
data = {
'service_key': service_key,
'app_version': app_version,
'slug': slug,
'image': "",
'dest_ys': True,
'dest_yb': False
}
if share_id is not None:
data['share_id'] = share_id
try:
self.region_client.service_publish_new_region(data)
except Exception as e:
self.region_client.service_publish_failure_region(data)
self.log.error(
"云帮应用本地发布失败,保存publish 失败。{0}".format(e.message),
step="callback",
status="failure")
pass
if self.is_oss_ftp:
try:
self.log.info("开始上传应用到云市")
self._upload_ftp(service_key, app_version, md5file)
logger.debug("mq_work.app_slug",
"*******ftp upload success!")
self.log.info("云市应用发布完毕", step="last", status="success")
self.user_cs_client.service_publish_success(
json.dumps(data))
try:
self.region_client.service_publish_success_region(data)
except Exception as e:
logger.exception(e)
self.region_client.service_publish_failure_region(data)
pass
except Exception as e:
logger.error("mq_work.app_slug",
"*******ftp upload failed, {0}".format(e))
self.region_client.service_publish_failure_region(data)
self.log.error(
"云市应用发布失败.", status="failure", step="callback")
else:
self.user_cs_client.service_publish_success(json.dumps(data))
try:
self.region_client.service_publish_success_region(data)
except Exception as e:
logger.exception(e)
self.region_client.service_publish_failure_region(data)
pass
self.log.info("云市应用发布完毕", step="last", status="success")
def _download_ftp(self, service_key, app_version, namespace, is_md5=False):
""" 云帮ftp下载文件 """
utils = FTPUtils(
host=self.ALL_REGION_FTP_HOST,
username=self.ALL_REGION_FTP_USERNAME,
password=self.ALL_REGION_FTP_PASSWORD,
namespace=self.ALL_REGION_FTP_NAMESPACE,
port=self.ALL_REGION_FTP_PORT)
logger.info("mq_work.app_slug",
"*******[download]download file from ftp")
# check whether the file for this service_key exists; generate it if missing
remote_file = self.ALL_REGION_FTP_PATH.format(
serviceKey=service_key, appVersion=app_version)
if is_md5:
remote_file += ".md5"
if not namespace:
logger.info("mq_work.app_slug",
"*******[download]namespace is null")
logger.error("mq_work.app_slug",
"*******[download]namespace is null")
else:
logger.info("mq_work.app_slug",
"*******[download]namespace is {}".format(namespace))
remote_file = "../" + namespace + "/" + remote_file
logger.info("mq_work.app_slug",
"*******[download]remote file is {}".format(remote_file))
curr_region_slug = self.CURR_REGION_PATH.format(
serviceKey=service_key, appVersion=app_version)
if is_md5:
curr_region_slug += ".md5"
logger.info(
"mq_work.app_slug",
"*******[download]curr_region_slug is {}".format(curr_region_slug))
return utils.download(remote_file, curr_region_slug)
def _download_ftp_market(self,
service_key,
app_version,
namespace,
is_md5=False):
""" 云市ftp下载文件 """
utils = FTPUtils(
host=self.ALL_REGION_FTP_HOST,
username=self.ALL_REGION_FTP_USERNAME,
password=self.ALL_REGION_FTP_PASSWORD,
namespace=self.ALL_REGION_FTP_NAMESPACE,
port=self.ALL_REGION_FTP_PORT)
logger.info("mq_work.app_slug",
"*******[download]download file from ftp")
# check whether the file for this service_key exists; generate it if missing
remote_file = self.ALL_REGION_FTP_PATH.format(
serviceKey=service_key, appVersion=app_version)
if is_md5:
remote_file += ".md5"
if not namespace:
logger.info("mq_work.app_slug",
"*******[download]namespace is null")
logger.error("mq_work.app_slug",
"*******[download]namespace is null")
else:
logger.info("mq_work.app_slug",
"*******[download]namespace is {}".format(namespace))
remote_file = "../" + namespace + "/" + remote_file
logger.info("mq_work.app_slug",
"*******[download]remote file is {}".format(remote_file))
curr_region_slug = self.CURR_REGION_PATH.format(
serviceKey=service_key, appVersion=app_version)
if is_md5:
curr_region_slug += ".md5"
logger.info(
"mq_work.app_slug",
"*******[download]curr_region_slug is {}".format(curr_region_slug))
return utils.download(remote_file, curr_region_slug)
def download_and_deploy(self):
""" 下载slug包 """
def start_service(service_id, deploy_version, operator):
# body = {
# "deploy_version": deploy_version,
# "operator": operator,
# "event_id": self.event_id
# }
body = {
"deploy_version": deploy_version,
"event_id": self.event_id
}
try:
# logger.info("mq_work.app_slug", "start service {}:{}".format(service_id, deploy_version))
self.log.info("开始调用api启动应用。")
self.api.upgrade_service(self.tenant_name, self.service_alias, json.dumps(body))
# self.region_api.start_service(service_id, json.dumps(body))
except self.region_api.CallApiError, e:
self.log.info(
"开始调用api启动应用失败。{}".format(e.message),
step="callback",
status="failure")
logger.exception("mq_work.app_slug", e)
service_key = self.task['app_key']
namespace = self.task['namespace']
app_version = self.task['app_version']
tenant_name = self.task['tenant_name']
service_alias = self.task['service_alias']
event_id = self.task['event_id']
# check whether this datacenter already has the slug package
dest_slug_file = self.CURR_REGION_PATH.format(
serviceKey=service_key, appVersion=app_version)
logger.info("mq_work.app_slug",
"dest_slug_file:{}".format(dest_slug_file))
ftp_ok = False
try:
# check whether this server already has the slug file
if os.path.exists(dest_slug_file):
self.log.debug("当前服务器存在本应用。本机同步开始")
md5string = get_md5(dest_slug_file)
# if the local platform ftp is enabled, download the md5 file for verification
md5_ok = False
if self.is_region_slug:
self.log.debug("文件MD5校验开始。")
try:
md5_ok = self._download_ftp(service_key, app_version,
namespace, True)
self.log.info("MD5校验完成。")
except Exception as e:
logger.info(
"mq_work.app_slug",
"download md5 file from cloudassistant ftp failed!"
)
self.log.error(
"MD5校验失败。{}".format(e.message),
step="callback",
status="failure")
logger.exception("mq_work.app_slug", e)
# md5 not downloaded yet and the cloud-market ftp is enabled
if not md5_ok and self.is_oss_ftp:
self.log.info("MD5校验不通过。开始从云市同步新版本。")
try:
md5_ok = self._download_ftp_market(
service_key, app_version, namespace, True)
except Exception as e:
self.log.info(
"从云市同步新版本发生异常。{}".format(e.message),
step="callback",
status="failure")
logger.exception("mq_work.app_slug", e)
if md5_ok:
md5file = dest_slug_file + ".md5"
same_file = self._check_md5(md5string, md5file)
if same_file:
logger.debug("mq_work.app_slug", "md5 check same.")
ftp_ok = True
else:
logger.debug(
"mq_work.app_slug",
"file md5 is changed, now delete old file")
os.remove(dest_slug_file)
else:
logger.debug("mq_work.app_slug",
"md5file download failed, now delete slug")
os.remove(dest_slug_file)
# check again whether this server has the slug file
if not os.path.exists(dest_slug_file):
curr_region_dir = os.path.dirname(dest_slug_file)
if not os.path.exists(curr_region_dir):
os.makedirs(curr_region_dir)
logger.debug("mq_work.app_slug",
"now check ftp:".format(self.is_region_slug))
# is the local platform ftp switch enabled
if self.is_region_slug:
logger.debug('mq_work.app_slug', 'now check file on ftp!')
try:
ftp_ok = self._download_ftp(service_key, app_version,
namespace)
except Exception as e:
logger.info("mq_work.app_slug",
"download object failed")
logger.exception("mq_work.app_slug", e)
logger.debug(
"mq_work.app_slug",
"*******[ftp download slug]result:==={}".format(
ftp_ok))
# decide whether to download from the cloud market: not downloaded yet and its ftp is enabled
if not ftp_ok and self.is_oss_ftp:
logger.info(
"mq_work.app_slug",
"now download from hub ftp:{}".format(dest_slug_file))
ftp_ok = self._download_ftp_market(service_key,
app_version, namespace)
logger.debug(
"mq_work.app_slug",
"*******[ftp download slug]result:==={}".format(
ftp_ok))
else:
ftp_ok = True
except Exception as e:
logger.exception("mq_work.app_slug", e)
version_status = {
"final_status":"failure",
}
if ftp_ok:
self.log.info("应用同步完成,开始启动应用。", step="app-image", status="success")
version_body = {
"type": 'slug',
"path": dest_slug_file,
"event_id": self.event_id
}
version_status = {
"final_status":"success",
}
try:
self.region_client.update_version_region(json.dumps(version_body))
self.region_client.update_version_event(self.event_id,json.dumps(version_status))
except Exception as e:
pass
try:
body = {
"deploy_version": self.task['deploy_version'],
"event_id": self.event_id
}
# self.api.start_service(tenant_name, service_alias, event_id)
self.api.upgrade_service(self.task['tenant_name'], self.task['service_alias'], json.dumps(body))
except Exception as e:
logger.exception(e)
self.log.error(
"应用自动启动失败。请手动启动", step="callback", status="failure")
else:
self.log.error("应用同步失败。", step="callback", status="failure")
try:
self.region_client.update_version_event(self.event_id,json.dumps(version_status))
except Exception as e:
self.log.error("更新version信息失败", step="app-slug")
pass
def queryServiceStatus(self, service_id):
try:
res, body = self.region_api.is_service_running(service_id)
logger.info(
'mq_work.app_slug',
"service_id=" + service_id + ";body=" + json.dumps(body))
status = body.get(service_id, "closed")
if status == "running":
self.log.debug("依赖的应用状态已经为运行中。", step="worker")
return True
except:
pass
self.log.debug("依赖的应用状态不是运行中,本应用稍后启动。", step="worker")
return False
def delete_old_version(self):
pass
def delete_objects(self, objects):
def oss_delete(del_objects):
logger.info("mq_work.app_slug",
"deleting objects list: {0}".format(del_objects))
success = self.oss_api.batch_delete_objects('gr-slug', del_objects)
if success:
logger.info("mq_work.app_slug", "delete objects success")
else:
logger.info("mq_work.app_slug",
"delete objects failed, {0}".format(success))
while len(objects) > 0:
del_objects, objects = objects[:500], objects[500:]
oss_delete(del_objects)
def update_publish_event(self, **kwargs):
body = json.dumps(kwargs)
try:
self.region_api.update_event(body)
except Exception, e:
logger.exception("mq_work.app_slug", e)
def splitChild(self, childs):
data = []
for lock_event_id in childs:
data.append(lock_event_id.split("/")[-1])
return data
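# entry point: the task JSON is read from stdin; the last line read becomes the job body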
def main():
body = ""
for line in fileinput.input(): # read task from stdin
body = line
app_slug = AppSlug(job=Job(body=body), config=load_dict)
app_slug.do_work()
class Job():
body = ""
def __init__(self, body, *args, **kwargs):
self.body = body
def get_body(self):
return self.body
def get_task(self):
task = json.loads(self.body)
return task
if __name__ == '__main__':
main()


@@ -1 +0,0 @@
{ "envs": {}, "expire": 180, "deploy_version": "20170905172818", "repo_url": "--branch master --depth 1 http://code.goodrain.com/demo/2048.git", "service_id": "59fbd0a74e7dfbf594fba0f8953593f8", "event_id": "018d406d2ec6438eb8040a893db36ffe", "tenant_id": "232bd923d3794b979974bb21b863608b", "action": "upgrade", "operator": "barnett2" }


@@ -1,571 +0,0 @@
# -*- coding: utf8 -*-
import os
import sys
from utils.parse_dockerfile import ParseDockerFile
from utils.log import EventLog
import logging
import logging.config
from etc import settings
import re
import json
import time
import datetime
import pipes
import shutil
import threading
import fileinput
from utils.shell import Executer as shell
from utils.docker import DockerfileItem
from clients.region import RegionAPI
from clients.region_api import RegionBackAPI
from clients.acp_api import ACPAPI
from clients.userconsole import UserConsoleAPI
load_dict = {}
with open("plugins/config.json", 'r') as load_f:
load_dict = json.load(load_f)
logging.config.dictConfig(settings.get_logging(load_dict))
logger = logging.getLogger('default')
reload(sys)
sys.setdefaultencoding('utf-8')
TENANT_DIR = '/grdata/build/tenant/{tenantId}'
SOURCE_DIR = '/cache/build/{tenantId}' + '/' + 'source/{serviceId}'
TGZ_DIR = TENANT_DIR + '/' + 'slug/{serviceId}'
CACHE_DIR = '/cache/build/{tenantId}' + '/' + 'cache/{serviceId}'
BUILD_LOG_DIR = '/grdata/logs/{tenantId}/{serviceId}/'
CLONE_TIMEOUT = 180
REGISTRY_DOMAIN = 'goodrain.me'
MAX_BUILD_TASK = 5
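# run docker directly if the socket is writable, otherwise invoke it via sudo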
if os.access("/var/run/docker.sock", os.W_OK):
DOCKER_BIN = "docker"
else:
DOCKER_BIN = "sudo -P docker"
class RepoBuilder():
def __init__(self, task, *args, **kwargs):
self.configs = kwargs.get("config")
self.region_api = RegionAPI(conf=self.configs['region'])
self.api = ACPAPI(conf=self.configs['region'])
self.user_cs_client = UserConsoleAPI(conf=self.configs['userconsole'])
self.repo_url = task['repo_url']
self.region_client = RegionBackAPI()
self.tenant_id = task['tenant_id']
self.service_id = task['service_id']
self.tenant_name = task['tenant_name']
self.service_alias = task['service_alias']
self.deploy_version = task['deploy_version']
self.action = task['action']
if 'event_id' in task:
self.event_id = task["event_id"]
self.log = EventLog().bind(event_id=self.event_id)
else:
self.event_id = ""
self.log = EventLog().bind(event_id=self.event_id)
self.operator = task['operator']
self.build_envs = task.get('envs', {})
self.expire = task.get('expire', 60)
self.start_time = int(time.time())
# self.source_dir = '/tmp/goodrain_web'
self.source_dir = SOURCE_DIR.format(
tenantId=self.tenant_id, serviceId=self.service_id)
self.cache_dir = CACHE_DIR.format(
tenantId=self.tenant_id, serviceId=self.service_id)
self.tgz_dir = TGZ_DIR.format(
tenantId=self.tenant_id, serviceId=self.service_id)
self.build_log_dir = BUILD_LOG_DIR.format(
tenantId=self.tenant_id, serviceId=self.service_id)
self.build_cmd = 'plugins/scripts/build.pl'
@property
def build_name(self):
return self.service_id[:8] + '_' + self.deploy_version
@property
def is_expired(self):
if hasattr(self, 'expire'):
current_time = int(time.time())
return bool(current_time - self.start_time > self.expire)
else:
return False
def prepare(self):
if os.path.exists(self.source_dir):
shutil.rmtree(self.source_dir)
for d in (self.source_dir, self.cache_dir, self.tgz_dir,
self.build_log_dir):
if not os.path.exists(d):
os.makedirs(d)
os.chown(self.tgz_dir, 200, 200)
os.chown(self.cache_dir, 200, 200)
def clone(self):
self.log.info("开始拉取代码。。", step="build-worker")
# code, output = shell.runsingle("git clone --branch master --depth 1 {0} {1}".format(self.repo_url, self.source_dir))
result = False
num = 0
while num < 2:
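# clone under a hard timeout (CLONE_TIMEOUT); GitHub URLs are cloned through the local HTTP proxy on 127.0.0.1:18888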
try:
cmdstr = "timeout -k 9 {2} git clone {0} {1}".format(self.repo_url, self.source_dir, CLONE_TIMEOUT)
if "github.com" in self.repo_url:
cmdstr = "timeout -k 9 {2} git clone -c http.proxy=http://127.0.0.1:18888 {0} {1}".format(self.repo_url, self.source_dir, CLONE_TIMEOUT)
shell.call(cmdstr)
result = True
break
except shell.ExecException, e:
num = num + 1
self.prepare()
if num < 2:
self.log.error(
"拉取代码发生错误,开始重试 {}".format(e.message),
status="failure",
step="worker-clone")
else:
self.log.error(
"拉取代码发生错误,部署停止 {}".format(e.message),
status="failure",
step="callback")
logger.exception('build_work.main', e)
result = False
logger.info('build_work.main', "git clone num=" + str(num))
return result
def get_commit_info(self):
try:
output = shell.call(
"""git log -n 1 --pretty --format='{"hash":"%H","author":"%an","timestamp":%at}'""",
self.source_dir)
if type(output) is list:
output = output[0]
jdata = json.loads(output)
output2 = shell.call("""git log -n 1 --pretty --format=%s""",
self.source_dir)
if type(output2) is list:
subject = output2[0]
jdata['subject'] = subject
else:
jdata['subject'] = 'unknown'
return jdata
except shell.ExecException, e:
logger.exception('build_work.main', e)
return "{}"
def find_dockerfile(self):
return bool(
os.path.exists('{0}/{1}'.format(self.source_dir, 'Dockerfile')))
def rewrite_files(self, dockerfile, insert_lines, cmd, entrypoint):
extend_lines = map(lambda x: x + '\n', insert_lines)
try:
f = open(dockerfile, 'r')
lines = f.readlines()
for line in lines:
if line.startswith('ENTRYPOINT') or line.startswith('CMD'):
lines.remove(line)
lines.extend(extend_lines)
f.close()
f = open(dockerfile, 'w')
f.writelines(lines)
f.close()
shutil.copytree('./lib/.goodrain',
'{0}/.goodrain'.format(self.source_dir))
if entrypoint is not None:
entrypoint_cmd = ' '.join(entrypoint)
shell.call(
'''sed -i -e 's#_type_#ENTRYPOINT#' -e 's#^_entrypoint_#'{0}'#' .goodrain/init'''.
format(pipes.quote(entrypoint_cmd)),
cwd=self.source_dir)
if cmd is not None:
shell.call(
'''sed -i -e 's#^_cmd_#'{0}'#' .goodrain/init'''.
format(pipes.quote(cmd)),
cwd=self.source_dir)
else:
shell.call(
'''sed -i -e 's#_type_#CMD#' -e 's#^_cmd_#'{0}'#' .goodrain/init'''.
format(pipes.quote(cmd)),
cwd=self.source_dir)
return True
except (shell.ExecException, OSError), e:
logger.exception('build_work.main', e)
return False
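# walk the Dockerfile and collect the exposed port, volume, ENTRYPOINT and CMD declarations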
def get_dockerfile_items(self, filename):
f = open(filename, 'r')
lines = map(lambda x: x.rstrip('\n').rstrip('\r'), f.readlines())
items = {"port": 0, "volume": ""}
entrypoint = None
cmd = None
for line in lines:
i = DockerfileItem(line)
if i.is_port_item:
items['port'] = i.value
elif i.is_volume_item:
items['volume'] = i.value
elif i.is_entrypoint_item:
entrypoint = i.value
elif i.is_cmd_item:
cmd = ' '.join([pipes.quote(e) for e in i.value])
# env = ','.join(map(lambda x: '{0}={1}'.format(x[0], x[1]), items.get('env', {}).items()))
volume_mount_path = items.get('volume')
inner_port = items.get('port')
# strip the tcp/udp suffixes
if isinstance(inner_port, basestring):
inner_port = inner_port.replace("/tcp", "")
inner_port = inner_port.replace("/udp", "")
inner_port = inner_port.replace('"', '')
return {
"inner_port": inner_port,
"volume_mount_path": volume_mount_path
}, entrypoint, cmd
def build_image(self):
# self.write_build_log(u"开始编译Dockerfile")
'''
insert_lines = [
'RUN which wget || (apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y wget) || (yum install -y wget)',
'RUN which curl || (apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y curl) || (yum install -y curl)',
'RUN mkdir -pv /opt/bin',
'ADD ./.goodrain/init /opt/bin/init',
'',
'RUN wget http://lang.goodrain.me/public/gr-listener -O /opt/bin/gr-listener -q && \\',
' chmod 755 /opt/bin/*',
'',
'RUN rm -rf /var/lib/dpkg/* /var/lib/apt/*',
'ENTRYPOINT ["/opt/bin/init"]',
]
'''
dockerfile = '{0}/{1}'.format(self.source_dir, 'Dockerfile')
update_items, entrypoint, cmd = self.get_dockerfile_items(dockerfile)
# re-parse the dockerfile
pdf = None
try:
self.log.info("开始解析Dockerfile", step="build_image")
pdf = ParseDockerFile(dockerfile)
except ValueError as e:
self.log.error(
"用户自定义的volume路径包含相对路径,必须为绝对路径!",
step="build_image",
status="failure")
logger.exception(e)
return False
except Exception as e:
self.log.error(
"解析Dockerfile发生异常", step="build_image", status="failure")
logger.exception(e)
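# derive the build image name from the repo URL: goodrain.me/<service-id prefix>_<account>_<project>:<deploy_version>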
s = self.repo_url
regex = re.compile(r'.*(?:\:|\/)([\w\-\.]+)/([\w\-\.]+)\.git')
m = regex.match(s)
account, project = m.groups()
_name = '_'.join([self.service_id[:12], account, project])
_tag = self.deploy_version
build_image_name = '{0}/{1}:{2}'.format(REGISTRY_DOMAIN, _name, _tag)
# image name must be lower
build_image_name = build_image_name.lower()
self.log.debug(
"构建镜像名称为{0}".format(build_image_name), step="build_image")
#build_image_name=""
no_cache = self.build_envs.pop('NO_CACHE', False)
if no_cache:
build_cmd = "{0} build -t {1} --no-cache .".format(
DOCKER_BIN, build_image_name)
else:
build_cmd = "{0} build -t {1} .".format(DOCKER_BIN,
build_image_name)
p = shell.start(build_cmd, cwd=self.source_dir)
while p.is_running():
line = p.readline()
self.log.debug(line, step="build_image")
for line in p.unread_lines:
self.log.debug(line, step="build_image")
if p.exit_with_err():
self.log.error(
"构建失败请检查Debug日志排查", step="build_image", status="failure")
return False
self.log.debug("镜像构建成功。开始推送", step="build_image", status="success")
try:
shell.call("{0} push {1}".format(DOCKER_BIN, build_image_name))
except shell.ExecException, e:
self.log.error(
"镜像推送失败。{}".format(e.message),
step="push_image",
status="failure")
return False
update_items.update({"image": build_image_name})
# ports, volumes, envs
if pdf:
update_items.update({
"port_list": pdf.get_expose(),
"volume_list": pdf.get_volume()
})
h = self.user_cs_client
try:
h.update_service(self.service_id, json.dumps(update_items))
self.region_client.update_service_region(self.service_id, json.dumps(update_items))
except h.CallApiError, e:
self.log.error(
"网络异常,更新应用镜像名称失败. {}".format(e.message),
step="update_image",
status="failure")
return False
version_body = {
"type": 'image',
"path": build_image_name,
"event_id": self.event_id
}
try:
self.region_client.update_version_region(json.dumps(version_body))
except Exception as e:
self.log.error(
"更新版本信息失败{0}失败{1}".format(self.event_id, e.message),
step="build_code")
pass
return True
def build_code(self):
self.log.info("开始编译代码包", step="build_code")
package_name = '{0}/{1}.tgz'.format(self.tgz_dir, self.deploy_version)
self.logfile = '{0}/{1}.log'.format(self.tgz_dir, self.deploy_version)
repos = self.repo_url.split(" ")
self.log.debug("repos=" + repos[1], step="build_code")
# repos[1] holds the branch name
no_cache = self.build_envs.pop('NO_CACHE', False)
if no_cache:
try:
shutil.rmtree(self.cache_dir)
os.makedirs(self.cache_dir)
os.chown(self.cache_dir, 200, 200)
self.log.debug(
"清理缓存目录{0}".format(self.cache_dir), step="build_code")
except Exception as e:
self.log.error(
"清理缓存目录{0}失败{1}".format(self.cache_dir, e.message),
step="build_code")
pass
try:
cmd = "perl {0} -b {1} -s {2} -c {3} -d {4} -v {5} -l {6} -tid {7} -sid {8} --name {9}".format(
self.build_cmd, repos[1], self.source_dir, self.cache_dir,
self.tgz_dir, self.deploy_version, self.logfile,
self.tenant_id, self.service_id, self.build_name)
if self.build_envs:
build_env_string = ':::'.join(
map(lambda x: "{}='{}'".format(x, self.build_envs[x]),
self.build_envs.keys()))
cmd += " -e {}".format(build_env_string)
p = shell.start(cmd)
while p.is_running():
line = p.readline()
self.log.debug(
line.rstrip('\n').lstrip('\x1b[1G'), step="build_code")
for line in p.unread_lines:
self.log.debug(line, step="build_code")
if p.exit_with_err():
self.log.error("编译代码包失败。", step="build_code", status="failure")
return False
self.log.debug("编译代码包完成。", step="build_code", status="success")
except shell.ExecException, e:
self.log.error(
"编译代码包过程遇到异常,{}".format(e.message),
step="build_code",
status="failure")
return False
try:
package_size = os.path.getsize(package_name)
if package_size == 0:
self.log.error(
"构建失败构建包大小为0 name {0}".format(package_name),
step="build_code",
status="failure")
return False
except OSError, e:
logger.exception('build_work.main', e)
self.log.error("代码构建失败构建包未生成。查看Debug日志检查错误详情", step="build_code", status="failure")
return False
self.log.info("代码构建完成", step="build_code", status="success")
version_body = {
"type": 'slug',
"path": package_name,
"event_id": self.event_id
}
try:
self.region_client.update_version_region(json.dumps(version_body))
except Exception as e:
logger.exception("build_work.main", e)
pass
return True
def feedback(self):
time.sleep(2)
body = {
"deploy_version": self.deploy_version,
"event_id": self.event_id
}
try:
if self.action == 'deploy':
self.log.info("开始部署应用。", step="app-deploy")
self.api.upgrade_service(self.tenant_name, self.service_alias, json.dumps(body))
# the upgrade API triggers a start if the service is not running yet
# h.deploy_service(self.service_id, json.dumps(body))
elif self.action == 'upgrade':
self.log.info("开始升级应用。", step="app-deploy")
self.api.upgrade_service(self.tenant_name, self.service_alias, json.dumps(body))
return True
except self.api.CallApiError, e:
self.log.error(
"部署应用时调用API发生异常。{}".format(e.message), step="app-deploy")
logger.exception('build_work.main', e)
return False
def run(self):
try:
self.prepare()
if self.clone():
commit_info = self.get_commit_info()
#can req api to update code info
self.log.info("代码拉取成功。", step="build-worker")
self.log.info(
"版本:{0} 上传者:{1} Commit:{2} ".format(
commit_info["hash"][0:7], commit_info["author"],
commit_info["subject"]),
step="code-version",
status="success")
version_body = {
"code_version":commit_info["hash"][0:7],
"code_commit_msg":commit_info["subject"],
"code_commit_author":commit_info["author"]
}
try:
self.region_client.update_version_event(self.event_id,json.dumps(version_body))
except Exception as e:
pass
if self.find_dockerfile():
self.log.info(
"代码识别出Dockerfile,直接构建镜像。", step="build-worker")
build_func = getattr(self, 'build_image')
else:
self.log.info("开始代码构建", step="build-worker")
build_func = getattr(self, 'build_code')
success = build_func()
if success:
# self.log.info("构建完成。", step="build-worker")
version_body = {
"final_status":"success",
}
self.log.info("构建完成。", step="build-worker", status="success")
ok = self.feedback()
if not ok:
self.log.error(
"升级部署应用错误", step="callback", status="failure")
else:
self.log.info("构建失败,请查看Debug构建日志", step="callback", status="failure")
version_body = {
"final_status":"failure",
}
try:
self.region_client.update_version_event(self.event_id,json.dumps(version_body))
except Exception as e:
self.log.error(
"更新version信息失败", step="build-worker")
pass
else:
self.log.error("代码拉取失败。", step="callback", status="failure")
version_body = {
"final_status":"failure",
}
try:
self.region_client.update_version_event(self.event_id,json.dumps(version_body))
except Exception as e:
self.log.error(
"更新version信息失败", step="build-worker")
pass
except Exception as e:
self.log.error(
"代码构建发生异常.{}".format(e.message),
step="callback",
status="failure")
version_body = {
"final_status":"failure",
}
try:
self.region_client.update_version_event(self.event_id,json.dumps(version_body))
except Exception as e:
self.log.error(
"更新version信息失败", step="build-worker")
pass
logger.exception('build_work.main', e)
raise e
def update_service_region(self, service_id, body):
#todo 127.0.0.1:3333/api/codecheck
# url = self.base_url + '/api/services/{0}'.format(service_id)
url = 'http://127.0.0.1:3228/v2/builder/codecheck/{0}'.format(service_id)
res, body = self._put(url, self.default_headers, body)
def main():
body = ""
for line in fileinput.input(): # read task from stdin
body = line
builder = RepoBuilder(task=Job(body).get_task(), config=load_dict)
builder.run()
class Job():
body = ""
def __init__(self, body, *args, **kwargs):
self.body = body
def get_body(self):
return self.body
def get_task(self):
task = json.loads(self.body)
return task
if __name__ == '__main__':
main()


@@ -1,289 +0,0 @@
# -*- coding: utf8 -*-
import socket
import json
import httplib
import httplib2
from urlparse import urlparse
import logging
from addict import Dict
logger = logging.getLogger('default')
def parse_url(url):
if not url.startswith('http'):
url = 'http://{}'.format(url)
p = urlparse(url)
items = p.netloc.split(':')
if len(items) == 2:
host = items[0]
port = int(items[1])
else:
host = items[0]
port = 443 if p.scheme == 'https' else 80
info = Dict()
info.scheme = p.scheme
info.host = host
info.port = port
info.path = p.path
return info
class Response(dict):
"""Is this response from our local cache"""
fromcache = False
version = 11
status = 200
reason = "Ok"
previous = None
def __init__(self, info):
if isinstance(info, httplib.HTTPResponse):
for key, value in info.getheaders():
self[key.lower()] = value
self.status = info.status
self['status'] = str(self.status)
self.reason = info.reason
self.version = info.version
def __getattr__(self, name):
if name == 'dict':
return self
else:
raise AttributeError(name)
class SuperHttpClient(object):
class CallApiError(Exception):
def __init__(self, apitype, url, method, res, body, describe=None):
self.message = {
"apitype": apitype,
"url": url,
"method": method,
"httpcode": res.status,
"body": body,
}
self.status = res.status
def __str__(self):
return json.dumps(self.message)
class ApiSocketError(CallApiError):
pass
scheme = 'http'
port = 80
base_url = ''
apitype = 'not design'
def __init__(self, endpoint, timeout=25, raise_error_code=True, log_request=True, retry_count=2):
parsed = parse_url(endpoint)
self.host = parsed.host
if parsed.scheme == 'https':
self.scheme = 'https'
if bool(parsed.port):
self.port = parsed.port
if parsed.port == 443:
self.scheme = 'https'
if bool(parsed.path):
self.base_url = parsed.path
self.timeout = timeout
self.raise_error_code = raise_error_code
self.log_request = log_request
self.retry_count = retry_count
def get_connection(self, *args, **kwargs):
if self.scheme == 'https':
conn = httplib.HTTPSConnection(self.host, self.port, timeout=self.timeout)
else:
conn = httplib.HTTPConnection(self.host, self.port, timeout=self.timeout)
return conn
def _jsondecode(self, string):
try:
pybody = json.loads(string)
except ValueError:
pybody = {"raw": string}
return pybody
def do_log(self, url, method, body, response, content):
if int(response['content-length']) > 1000:
record_content = '%s .....ignore.....' % content[:1000]
else:
record_content = content
if body is not None and len(body) > 1000:
record_body = '%s .....ignore.....' % body[:1000]
else:
record_body = body
logger.debug('request', '''{0} "{1}" body={2} response: {3} ------------- and content is {4}'''.format(method, url, record_body, response, record_content))
def _request(self, url, method, headers={}, body=None):
retry_count = self.retry_count
while retry_count:
try:
conn = self.get_connection()
conn.request(method, url, headers=headers, body=body)
res = conn.getresponse()
response = Response(res)
content = res.read()
try:
if res.status / 100 == 2:
if self.log_request:
self.do_log(url, method, body, response, content)
else:
self.do_log(url, method, body, response, content)
except Exception, e:
logger.error("request", e)
if response['content-type'].startswith('application/json'):
content = self._jsondecode(content)
if isinstance(content, dict):
content = Dict(content)
if res.status / 100 != 2 and self.raise_error_code:
raise self.CallApiError(self.apitype, url, method, res, body)
return response, content
except (socket.error, socket.timeout), e:
logger.exception('client_error', e)
retry_count -= 1
if retry_count:
logger.error("client_error", "retry request: %s" % url)
else:
raise self.ApiSocketError(self.apitype, url, method, Dict({"status": 101}), {"type": "connect error", "error": str(e)})
def _get(self, url, headers={}):
response, content = self._request(url, 'GET', headers=headers)
return response, content
def _post(self, url, headers={}, body=None):
response, content = self._request(url, 'POST', headers=headers, body=body)
return response, content
def _put(self, url, headers={}, body=None):
response, content = self._request(url, 'PUT', headers=headers, body=body)
return response, content
def _delete(self, url, headers={}, body=None):
response, content = self._request(url, 'DELETE', headers=headers, body=body)
return response, content
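# httplib2-based client: a single request per call with status checking, no retry loop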
class BaseHttpClient(object):
class CallApiError(Exception):
def __init__(self, apitype, url, method, res, body, describe=None):
self.message = {
"apitype": apitype,
"url": url,
"method": method,
"httpcode": res.status,
"body": body,
}
self.status = res.status
def __str__(self):
return json.dumps(self.message)
class ApiSocketError(CallApiError):
pass
def __init__(self, *args, **kwargs):
self.apitype = 'Not specified'
#self.report = Dict({"ok":True})
def _jsondecode(self, string):
try:
pybody = json.loads(string)
except ValueError:
pybody = {"raw": string}
return pybody
def _check_status(self, url, method, response, content):
res = Dict(response)
res.status = int(res.status)
body = self._jsondecode(content)
if isinstance(body, dict):
body = Dict(body)
if 400 <= res.status <= 600:
raise self.CallApiError(self.apitype, url, method, res, body)
else:
return res, body
def _request(self, url, method, headers=None, body=None):
try:
http = httplib2.Http(timeout=25)
if body is None:
response, content = http.request(url, method, headers=headers)
else:
response, content = http.request(url, method, headers=headers, body=body)
if len(content) > 1000:
record_content = '%s .....ignore.....' % content[:1000]
else:
record_content = content
if body is not None and len(body) > 1000:
record_body = '%s .....ignore.....' % body[:1000]
else:
record_body = body
logger.debug(
'request', '''{0} "{1}" body={2} response: {3} \nand content is {4}'''.format(method, url, record_body, response, record_content))
return response, content
except socket.timeout, e:
logger.exception('client_error', e)
raise self.CallApiError(self.apitype, url, method, Dict({"status": 101}), {"type": "request time out", "error": str(e)})
except socket.error, e:
logger.exception('client_error', e)
raise self.ApiSocketError(self.apitype, url, method, Dict({"status": 101}), {"type": "connect error", "error": str(e)})
def _get(self, url, headers):
response, content = self._request(url, 'GET', headers=headers)
res, body = self._check_status(url, 'GET', response, content)
return res, body
def _post(self, url, headers, body=None):
if body is not None:
response, content = self._request(url, 'POST', headers=headers, body=body)
else:
response, content = self._request(url, 'POST', headers=headers)
res, body = self._check_status(url, 'POST', response, content)
return res, body
def _put(self, url, headers, body=None):
if body is not None:
response, content = self._request(url, 'PUT', headers=headers, body=body)
else:
response, content = self._request(url, 'PUT', headers=headers)
res, body = self._check_status(url, 'PUT', response, content)
return res, body
def _delete(self, url, headers, body=None):
if body is not None:
response, content = self._request(url, 'DELETE', headers=headers, body=body)
else:
response, content = self._request(url, 'DELETE', headers=headers)
res, body = self._check_status(url, 'DELETE', response, content)
return res, body
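
For reference, a minimal sketch of how these _base helpers are consumed: a client subclasses BaseHttpClient, sets base_url and default_headers, and calls _get/_post, which return an (res, body) pair with body already JSON-decoded and raise CallApiError on 4xx/5xx responses. The class name, endpoint and import path below are assumptions for illustration only.

from clients._base import BaseHttpClient  # import path assumed from the repo layout

class PingAPI(BaseHttpClient):
    def __init__(self, base_url):
        super(PingAPI, self).__init__()
        self.apitype = 'ping'
        self.base_url = base_url
        self.default_headers = {"Content-Type": "application/json"}

    def ping(self):
        # _get returns (res, body); a 4xx/5xx status raises self.CallApiError
        return self._get(self.base_url + '/ping', self.default_headers)

api = PingAPI('http://region.goodrain.me:8888')
try:
    res, body = api.ping()
except api.CallApiError as e:
    print(e)  # carries apitype, url, method and http status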

View File

@ -1,42 +0,0 @@
# -*- coding: utf8 -*-
import logging
import json
import os
from _base import BaseHttpClient
logger = logging.getLogger('default')
class ACPAPI(BaseHttpClient):
def __init__(self, conf=None, *arg, **kwargs):
super(ACPAPI, self).__init__()
self._name = 'region'
self.default_headers = {"Content-Type": "application/json"}
if conf["token"] is not None:
self.default_headers.update({
"Authorization":
"Token {}".format(conf["token"])
})
if conf["url"] is None:
self.base_url = 'http://region.goodrain.me:8888'
else:
self.base_url = conf["url"]
def upgrade_service(self, tenant_name, service_alias, body):
url = self.base_url + \
'/v2/tenants/{0}/services/{1}/upgrade'.format(tenant_name, service_alias)
logger.exception("url is {}".format(url))
res, body = self._post(url, self.default_headers, body)
return res, body
def start_service(self, tenant_name, service_alias, event_id):
url = self.base_url + \
'/v2/tenants/{0}/services/{1}/start'.format(tenant_name, service_alias)
res, body = self._post(url, self.default_headers, json.dumps({"event_id": event_id}))
return res, body
def update_iamge(self, tenant_name, service_alias, image_name):
url = self.base_url + \
'/v2/tenants/{0}/services/{1}'.format(tenant_name, service_alias)
res, body = self._put(url, self.default_headers, json.dumps({"image_name": image_name}))
return res, body

View File

@ -1,15 +0,0 @@
import os
import sys
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, BASE_DIR + '/lib')
from oss.oss_api import OssAPI as API
class OssAPI(API):
def __init__(self, conf, *args, **kwargs):
API.__init__(self, conf["endpoint"], conf["id"], conf["secret"])
self.timeout = 90

View File

@ -1,86 +0,0 @@
import os
import sys
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, BASE_DIR + '/lib')
from utils.format import JSON, to_dict
from addict import Dict
import aliyun.api
class AliyunAPI(object):
def __init__(self, conf=None, RegionId=None, *args, **kwargs):
aliyun.setDefaultAppInfo('nMscVs3CaIXPEDUd', 'g4RWmftifuJxqUdqEWc69h0exO2V46')
self.api = aliyun.api
self.region_id = RegionId if RegionId is not None else ""
def list_instances(self, RegionId=None, InstanceIds=None, dict_key=None):
'''
InstanceIds is a list
'''
m = self.api.Ecs20140526DescribeInstancesRequest()
m.RegionId = self.region_id if RegionId is None else RegionId
if InstanceIds is not None:
m.InstanceIds = JSON.dumps(InstanceIds)
response = m.getResponse()
try:
res_list = []
for i in response['Instances']['Instance']:
item = Dict({
"InstanceName": i['InstanceName'],
"InstanceId": i['InstanceId'],
"Ip": i['VpcAttributes']['PrivateIpAddress']['IpAddress'][0]
})
res_list.append(item)
if dict_key is not None:
return to_dict(res_list, dict_key)
else:
return res_list
except Exception:
return None
def get_slb_backservers(self, LoadBalancerId, dict_key=None):
m = self.api.Slb20140515DescribeLoadBalancerAttributeRequest()
m.LoadBalancerId = LoadBalancerId
response = m.getResponse()
try:
res_list = []
for item in response['BackendServers']['BackendServer']:
res_list.append(Dict(item))
if dict_key is not None:
return to_dict(res_list, dict_key)
else:
return res_list
except Exception:
return None
def set_slb_backservers(self, LoadBalancerId, BackendServers):
m = self.api.Slb20140515SetBackendServersRequest()
m.LoadBalancerId = LoadBalancerId
m.BackendServers = JSON.dumps(BackendServers)
response = m.getResponse()
return bool('Code' not in response)
def get_slb_backserver_health(self, LoadBalancerId, dict_key=None):
m = self.api.Slb20140515DescribeHealthStatusRequest()
m.LoadBalancerId = LoadBalancerId
response = m.getResponse()
try:
res_list = []
for item in response['BackendServers']['BackendServer']:
res_list.append(Dict(item))
if dict_key is not None:
return to_dict(res_list, dict_key)
else:
return res_list
except Exception:
return None
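
For reference, a sketch of driving the wrapper above; the region id, instance ids, load balancer id, dict_key field names and the module path are placeholders/assumptions.

from clients.aliyun_api import AliyunAPI  # module path assumed

ali = AliyunAPI(RegionId='cn-shanghai')
# list_instances returns a list of Dicts, or a {key: item} mapping when dict_key is given
instances = ali.list_instances(InstanceIds=['i-abc123'], dict_key='InstanceId')
backends = ali.get_slb_backservers('lb-xyz789', dict_key='ServerId')  # 'ServerId' field assumed
health = ali.get_slb_backserver_health('lb-xyz789')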

View File

@ -1,126 +0,0 @@
import uuid
import etcd
import logging
import socket
import threading
from addict import Dict
logger = logging.getLogger('default')
class BasicLocker(object):
def __init__(self, conf, *args, **kwargs):
self.etcd_cfg = conf
self.etcdClient = None
self.base_path = '/goodrain/locks'
def get_etcd_cli(self):
if self.etcdClient is None:
self.etcdClient = etcd.Client(host=self.etcd_cfg.get('host'), port=self.etcd_cfg.get('port'), allow_redirect=True)
return self.etcdClient
class TaskLocker(BasicLocker):
def __init__(self, conf):
super(TaskLocker, self).__init__(conf)
        self.base_path = '/goodrain/locks/tasks'
def exists(self, lock_id):
try:
path = self.base_path + '/' + lock_id
self.get_etcd_cli().get(path)
return True
except Exception as e:
pass
return False
def add_lock(self, lock_id, value):
path = self.base_path + '/' + lock_id
self.get_etcd_cli().set(path, value)
def _childs(self, key):
childs = {}
try:
r = self.get_etcd_cli().read(key, recursive=True, sorted=True)
for child in r.children:
if child.dir:
tem = self._childs(child.key)
childs.update(tem)
else:
childs[child.key] = child.value
except Exception:
pass
return childs
def get_children(self, lock_id):
childs = []
try:
event_path = self.base_path + '/' + lock_id
r = self.get_etcd_cli().read(event_path, recursive=True, sorted=True)
for child in r.children:
if child.dir:
tem = self._childs(child.key)
childs.extend(tem)
else:
childs.append(child.key)
except Exception as e:
logger.exception(e)
return childs
def get_lock_event(self, lock_id, event_id):
event_path = self.base_path + '/' + lock_id + '/' + event_id
try:
res = self.get_etcd_cli().get(event_path)
if not res.dir:
return res.value
except Exception:
pass
return ""
def remove_lock_event(self, lock_id, event_id):
event_path = self.base_path + '/' + lock_id + '/' + event_id
self.get_etcd_cli().delete(event_path, recursive=True)
def drop_lock(self, lock_id):
event_path = self.base_path + '/' + lock_id
self.get_etcd_cli().delete(event_path, recursive=True)
def release_lock(self):
self.etcdClient = None
class InstanceLocker(BasicLocker):
def __init__(self, renewSecondsPrior=5, timeout=None):
conf = Dict({"host": "127.0.0.1", "port": 4001})
super(InstanceLocker, self).__init__(conf)
self.base_path = '/goodrain/locks/instances'
self.client = self.get_etcd_cli()
def get_lock(self, instance_name):
key = self.base_path + '/' + instance_name.lstrip('/')
try:
return self.client.get(key)
except etcd.EtcdKeyNotFound:
return None
def add_lock(self, instance_name, value, ttl=60):
key = self.base_path + '/' + instance_name.lstrip('/')
try:
self.client.write(key, value, prevExist=False, recursive=True, ttl=ttl)
except etcd.EtcdAlreadyExist:
return False
return True
def update_lock(self, instance_name, value, ttl=60):
key = self.base_path + '/' + instance_name.lstrip('/')
self.client.write(key, value, ttl=ttl)
return True
def drop_lock(self, instance_name):
key = self.base_path + '/' + instance_name.lstrip('/')
try:
self.client.delete(key, prevExist=True)
except Exception as e:
print e
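
For reference, a minimal sketch of the locking pattern the plugins use with TaskLocker (the etcd address, lock id and lock value are example values; compare the code_check plugin further down):

from clients.etcdcli import TaskLocker

locker = TaskLocker(conf={"host": "127.0.0.1", "port": 2379})
lock_id = 'code_check.' + 'some-service-id'
if not locker.exists(lock_id):
    locker.add_lock(lock_id, bytes('git@example.com:demo/app.git'))
    try:
        pass  # do the work guarded by the lock here
    finally:
        locker.drop_lock(lock_id)
        locker.release_lock()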

View File

@ -1,161 +0,0 @@
#!/usr/bin/env python
# coding=utf-8
import os
import ftplib
from ftplib import FTP
import logging
logger = logging.getLogger('default')
class FTPUtils:
""" 公用存储区域上传下载文件 """
def __init__(self, host, username, password, namespace, port=22, timeout=30):
self.host = str(host)
self.port = str(port)
self.timeout = timeout
self.username = str(username)
self.password = str(password)
self.namespace = str(namespace)
        # Check that the namespace exists and create the directory
# self.check_dir(self.namespace)
def _init_ftp(self):
f = FTP()
# f.set_debuglevel(2)
f.connect(self.host, self.port, self.timeout)
f.login(self.username, self.password)
f.set_debuglevel(2)
f.set_pasv(1)
return f
def check_dir(self, dirname, f=None):
""" 检查用户根目录下, dirname是否存在 """
try:
            # No FTP handle was passed in, initialize one
if not f:
f = self._init_ftp()
                # An FTP handle initialized here must be closed here
_is_close = True
            # Split the path on the separator
dirs = dirname.split('/')
for tmpdir in dirs:
if tmpdir.strip():
                    # Check whether the dir exists (empty listing or missing)
tmplist = f.nlst()
if tmpdir not in tmplist:
f.mkd(tmpdir)
# tmplist = f.nlst(tmpdir)
# if not tmplist:
# tmplist = f.nlst()
                    # # Check whether it exists
# if tmpdir not in tmplist:
# f.mkd(tmpdir)
f.cwd(tmpdir)
            # Close the FTP handle that was initialized inside this function
if _is_close:
f.quit()
except ftplib.all_errors as e:
raise e
def create_dir(self, dirname):
""" 检查用户根目录下, dirname是否存在 """
try:
f = self._init_ftp()
            # Split the path on the separator
dirs = dirname.split('/')
for tmpdir in dirs:
if tmpdir.strip():
                    # Check whether the dir exists (empty listing or missing)
tmplist = f.nlst()
if tmpdir not in tmplist:
f.mkd(tmpdir)
# tmplist = f.nlst(tmpdir)
# if not tmplist:
# tmplist = f.nlst()
                    # # Check whether it exists
# if tmpdir not in tmplist:
# f.mkd(tmpdir)
f.cwd(tmpdir)
            # Close the FTP handle that was initialized inside this function
f.quit()
except ftplib.all_errors as e:
raise e
def delete_dir(self, dirname):
"""删除文件"""
try:
f = self._init_ftp()
parent = os.path.dirname(dirname)
f.cwd(parent)
filename = os.path.basename(dirname)
f.rmd(filename)
f.quit()
return True
except ftplib.all_errors as e:
raise e
def delete_file(self, filepath):
"""删除文件"""
try:
f = self._init_ftp()
parent = os.path.dirname(filepath)
f.cwd(parent)
filename = os.path.basename(filepath)
f.delete(filename)
f.quit()
return True
except ftplib.all_errors as e:
raise e
def download(self, remote_file, localfile):
try:
f = self._init_ftp()
remote_dir = os.path.dirname(remote_file)
remote_file = os.path.basename(remote_file)
logger.debug("mq_work.app_slug", "remote:{}/{}".format(remote_dir, remote_file))
f.cwd(remote_dir)
tmplist = f.nlst(remote_file)
logger.debug("mq_work.app_slug", tmplist)
if tmplist:
with open(localfile, 'wb') as contents:
f.retrbinary('RETR %s' % remote_file, contents.write)
f.quit()
return True
else:
f.quit()
return False
except ftplib.all_errors as e:
raise e
def upload(self, remote_dir, localfile):
""" 上传文件到ftp """
try:
f = self._init_ftp()
f.cwd(remote_dir)
filename = os.path.basename(localfile)
with open(localfile, 'rb') as contents:
f.storbinary('STOR %s' % filename, contents)
f.quit()
except ftplib.all_errors as e:
raise e
def checkFile(self, remote_file):
try:
f = self._init_ftp()
remote_dir = os.path.dirname(remote_file)
file_name = os.path.basename(remote_file)
dirs = remote_dir.split('/')
for tmpdir in dirs:
if tmpdir.strip():
                    # Check whether the dir exists (empty listing or missing)
tmplist = f.nlst()
if tmpdir not in tmplist:
f.mkd(tmpdir)
f.cwd(tmpdir)
            # Close the FTP handle that was initialized inside this function
            # Check whether the file exists in the directory
tmplist = f.nlst()
return True if file_name in tmplist else False
except ftplib.all_errors as fa:
raise fa
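
For reference, a usage sketch of the FTP helper above; the host, port and username mirror the "publish.slug" block in plugins/config.json, while the password, namespace and paths are placeholders, and the module path is assumed.

from clients.ftputils import FTPUtils  # module path assumed

utils = FTPUtils(host='139.196.88.57', port=10021, username='goodrain-admin',
                 password='***', namespace='app-publish/')
utils.check_dir('app-publish/tenant-a')                  # creates missing levels one by one
utils.upload('/app-publish/tenant-a', '/tmp/app.tgz')    # store a local file remotely
ok = utils.download('/app-publish/tenant-a/app.tgz', '/tmp/app-copy.tgz')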

View File

@ -1,150 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf8 -*-
import requests
import json
from addict import Dict
import logging
logger = logging.getLogger('default')
class HubUtils:
""" cloud market image hub upload/download interface """
def __init__(self, image_config):
self.username = image_config.get("oss_username")
self.password = image_config.get("oss_password")
self.namespace = image_config.get('oss_namespace')
self.host = image_config.get('oss_host')
self.cert = image_config.get('oss_cart')
self.dockersearch = '/api/v0/index/dockersearch' # get
self.search = '/api/v0/index/search' # get
self.reindex = '/api/v0/index/reindex' # POST
self.repo_error = {
400: '名称已经存在',
401: '客户端未授权',
403: '客户端无权限',
404: '账户不存在',
409: '未配置管理账户',
}
def rename_image(self, image, tag=None, namespace=None):
data = self.parse_image(image)
if not namespace:
namespace = self.namespace
# goodrain.me/xxx:tagx hub.goodrain.com/goodrain/xxx:tagx
end_name = '{0}/{1}'.format(self.host + '/' + namespace, data.name)
if tag is not None:
end_name = '{0}:{1}'.format(end_name, tag)
elif data.tag is not None:
end_name = '{0}:{1}'.format(end_name, data.tag)
return end_name
def parse_image(self, image):
if '/' in image:
host, full_name = image.split('/', 1)
else:
host, full_name = (None, image)
if ':' in full_name:
name, tag = full_name.split(':', 1)
else:
name, tag = (full_name, 'latest')
return Dict(host=host, name=name, tag=tag)
def check(self, image_name, tag_name, namespace=None):
        # 1. Check whether the namespace exists
        # 2. If it exists, check whether tag_name exists
        # 3. If it does not, create the repository
if not namespace:
namespace = self.namespace
repositories_url = '/api/v0/repositories/%s' % namespace
url = 'https://' + self.host + '/' + repositories_url + '/' + image_name
headers = {'content-type': 'application/json'}
auth = requests.auth.HTTPBasicAuth(self.username, self.password)
resp = requests.get(url, headers=headers, verify=False, auth=auth)
code = resp.status_code
if code == 200:
logger.debug('mq_work.app_image', 'query {} result:{}'.format(url, resp.json()))
return True
else:
            # Create the repository
payload = {'name': str(image_name), 'shortDescription': '', 'longDescription': '', 'visibility': 'public'}
url = 'https://' + self.host + '/' + repositories_url
respp = requests.post(url, headers=headers, verify=False,
auth=auth, data=json.dumps(payload))
if respp.status_code == 201:
logger.debug('mq_work.app_image', 'create repos namespace, result:{}'.format(respp.json()))
else:
logger.error('mq_work.app_image', 'result code:{}, msg:{}'.format(respp.status_code, self.repo_error[respp.status_code]))
return False
def check_image(self, hub_image, tag_name, namespace=None):
headers = {'content-type': 'application/json'}
if not namespace:
namespace = self.namespace
image_check = '/api/v0/repositories/' + namespace + '/{reponame}/tags'
url = 'https://' + self.host + '/' + image_check.format(reponame=hub_image)
auth = requests.auth.HTTPBasicAuth(self.username, self.password)
resp = requests.get(url, headers=headers, verify=False, auth=auth)
code = resp.status_code
if code == requests.codes.ok:
#
jsondata = resp.json()
tags = jsondata['tags']
namearray = [x['name'] for x in tags]
if tag_name in namearray:
return True
else:
return False
else:
return False
def check_repositories(self, repo, namespace=None):
""" 创建repositories """
if not namespace:
namespace = self.namespace
repositories_url = '/api/v0/repositories/%s' % namespace
url = 'https://' + self.host + '/' + repositories_url + '/' + repo
headers = {'content-type': 'application/json'}
auth = requests.auth.HTTPBasicAuth(self.username, self.password)
resp = requests.get(url, headers=headers, verify=False, auth=auth)
code = resp.status_code
repository = {}
if code == 200:
print resp.json()
jsondata = resp.json()
repository['id'] = jsondata['id']
repository['namespace'] = jsondata['namespace']
repository['namespaceType'] = jsondata['namespaceType']
repository['name'] = jsondata['name']
repository['visibility'] = jsondata['visibility']
repository['status'] = jsondata['status']
repository['code'] = 200
return repository
else:
payload = {'name': str(repo), 'shortDescription': '', 'longDescription': '', 'visibility': 'public'}
url = 'https://' + self.host + '/' + repositories_url
respp = requests.post(url,
headers=headers, verify=False,
auth=auth, data=json.dumps(payload))
if respp.status_code == 201:
print respp
print respp.json()
jsondata = respp.json()
repository['id'] = jsondata['id']
repository['namespace'] = jsondata['namespace']
repository['namespaceType'] = jsondata['namespaceType']
repository['name'] = jsondata['name']
repository['visibility'] = jsondata['visibility']
repository['status'] = jsondata['status']
repository['code'] = 200
else:
repository['code'] = respp.status_code
repository['msg'] = self.repo_error[respp.status_code]
return repository
# Command-line entry point
if __name__ == "__main__":
print 'aaa'
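
For reference, a sketch of the hub helper above, fed with the same keys as the "publish.image" block in plugins/config.json (password masked; image names are placeholders):

from clients.hubimageutils import HubUtils

image_config = {
    "oss_host": "hub.goodrain.com",
    "oss_namespace": "goodrain",
    "oss_username": "goodrain-admin",
    "oss_password": "***",
    "oss_cart": "/usr/local/share/ca-certificates/hub.goodrain.com.crt",
}
hub = HubUtils(image_config)
# goodrain.me/nginx:1.11 -> hub.goodrain.com/goodrain/nginx:1.11
remote_image = hub.rename_image('goodrain.me/nginx:1.11')
already_there = hub.check_image('nginx', '1.11')  # True if the tag already exists on the hub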

View File

@ -1,27 +0,0 @@
import json
import logging
from _base import BaseHttpClient
logger = logging.getLogger('default')
class InfluxdbAPI(BaseHttpClient):
def __init__(self, conf, *args, **kwargs):
BaseHttpClient.__init__(self, *args, **kwargs)
self.default_headers = {'Connection': 'keep-alive'}
self.url = 'http://{0}:{1}/db/{2}/series?u={3}&p={4}'.format(conf.host, conf.port, conf.db, conf.user, conf.password)
def write(self, data):
if isinstance(data, (list, dict)):
data = json.dumps(data)
headers = self.default_headers.copy()
headers.update({'content-type': 'application/json'})
try:
res, body = self._post(self.url, headers, data)
if 200 <= res.status < 300:
return True
except Exception, e:
logger.exception('client_error', e)
return False

View File

@ -1,15 +0,0 @@
from _base import BaseHttpClient
class KubernetesApi(BaseHttpClient):
def __init__(self, conf=None, *arg, **kwargs):
super(KubernetesApi, self).__init__()
self._name = 'kubeapi'
self.base_url = conf.url
self.default_headers = {"Content-Type": "application/json"}
def get_rc(self, tenant_id, replica_id):
url = self.base_url + "/namespaces/{0}/replicationcontrollers/{1}".format(tenant_id, replica_id)
res, body = self._get(url, self.default_headers)
return body

View File

@ -1,43 +0,0 @@
import json
import logging
from _base import BaseHttpClient
logger = logging.getLogger('default')
class OperateDataApi(BaseHttpClient):
def __init__(self, conf=None, *args, **kwargs):
super(OperateDataApi, self).__init__()
self.default_headers = {"Content-Type": "application/json"}
if conf is None:
self.base_url = "http://op_console.goodrain.ali-sh.goodrain.net:10080"
else:
self.base_url = conf.url
def send_log(self, body):
url = self.base_url + '/api/operate-log/'
res, body = self._post(url, self.default_headers, body)
return res, body
def send_container(self, body):
url = self.base_url + '/api/operate-container/'
res, body = self._post(url, self.default_headers, body)
return res, body
def send_evnetdata(self, body):
url = self.base_url + '/api/operate-event/'
res, body = self._post(url, self.default_headers, body)
return res, body
def send_container_memory(self, body):
url = self.base_url + '/api/operate-container-memory/'
res, body = self._post(url, self.default_headers, body)
return res, body
def send_service_running(self, body):
url = self.base_url + '/api/operate-running-statics/'
res, body = self._post(url, self.default_headers, body)
return res, body

View File

@ -1,114 +0,0 @@
# -*- coding: utf8 -*-
import logging
import json
import os
from _base import BaseHttpClient
logger = logging.getLogger('default')
class RegionAPI(BaseHttpClient):
def __init__(self, conf=None, *arg, **kwargs):
super(RegionAPI, self).__init__()
self._name = 'region'
self.default_headers = {"Content-Type": "application/json"}
if conf["token"] is not None:
self.default_headers.update({"Authorization": "Token {}".format(conf["token"])})
if conf["url"] is None:
self.base_url = 'http://region.goodrain.me:8888'
else:
self.base_url = conf["url"]
def upgrade_service(self, service_id, body):
url = self.base_url + \
'/v1/services/lifecycle/{0}/upgrade/'.format(service_id)
res, body = self._post(url, self.default_headers, body)
return res, body
def rolling_upgrade_service(self, service_id, body):
url = self.base_url + \
'/v1/services/lifecycle/{0}/upgrade/'.format(service_id)
res, body = self._put(url, self.default_headers, body)
return res, body
def deploy_service(self, service_id, body):
url = self.base_url + \
'/v1/services/lifecycle/{0}/deploy/'.format(service_id)
res, body = self._post(url, self.default_headers, body)
return res, body
def start_service(self, service_id, body):
url = self.base_url + \
'/v1/services/lifecycle/{0}/start/'.format(service_id)
res, body = self._post(url, self.default_headers, body)
return res, body
def system_pause(self, tenant_id):
url = self.base_url + '/v1/tenants/{0}/system-pause'.format(tenant_id)
res, body = self._post(url, self.default_headers)
return res, body
def stop_service(self, service_id):
url = self.base_url + \
'/v1/services/lifecycle/{0}/stop/'.format(service_id)
tmp_body = json.dumps({
"event_id": "system"
})
res, body = self._post(url, self.default_headers, body=tmp_body)
return res, body
def update_b_event(self, service_id, body):
url = self.base_url + \
'/v1/services/lifecycle/{0}/beanstalk/'.format(service_id)
res, body = self._post(url, self.default_headers, body)
return body
def update_event(self, body):
url = self.base_url + '/v1/events'
res, body = self._put(url, self.default_headers, body)
return body
def get_history_pods(self, service_id):
url = self.base_url + '/v1/services/lifecycle/{0}/history_pods'.format(service_id)
res, body = self._get(url, self.default_headers)
return body
def clean_history_pods(self, service_id):
url = self.base_url + '/v1/services/lifecycle/{0}/history_pods'.format(service_id)
res, body = self._delete(url, self.default_headers)
return body
def get_lb_ngx_info(self, tenant_name, service_name):
url = self.base_url + '/v1/lb/ngx-info/{0}/{1}'.format(tenant_name, service_name)
res, body = self._get(url, self.default_headers)
return body
def renew_lb_ngx_info(self, body):
url = self.base_url + '/v1/lb/ngx-info'
res, body = self._post(url, self.default_headers, body)
return body
def set_service_running(self, service_id):
url = self.base_url + \
'/v1/services/lifecycle/{0}/set-running/'.format(service_id)
res, body = self._post(url, self.default_headers)
return res, body
def is_service_running(self, service_id):
url = self.base_url + \
'/v1/services/lifecycle/{0}/status/'.format(service_id)
res, body = self._post(url, self.default_headers)
return res, body
def opentsdbQuery(self, start, queries):
try:
url = self.base_url + "/v1/statistic/opentsdb/query"
data = {"start": start, "queries": queries}
res, body = self._post(url, self.default_headers, json.dumps(data))
dps = body[0]['dps']
return dps
except IndexError:
logger.info('tsdb_query', "request: {0}".format(url))
logger.info('tsdb_query', "response: {0} ====== {1}".format(res, body))
return None

View File

@ -1,60 +0,0 @@
import logging
import os
import json
from _base import BaseHttpClient
logger = logging.getLogger('default')
class RegionBackAPI(BaseHttpClient):
def __init__(self, conf=None, *args, **kwargs):
super(RegionBackAPI, self).__init__()
self._name = 'region'
self.default_headers = {"Content-Type": "application/json"}
if conf is None:
self.base_url = "http://localhost:3228/v2/builder"
else:
self.base_url = conf["url"]
def service_publish_success_region(self, body):
# url = self.base_url + '/api/tenants/services/publish'
        url = self.base_url + '/publish'
        body["status"] = "success"
logger.info("publish app to ys?{}".format(body["dest_ys"]))
res, body = self._post(url, self.default_headers, json.dumps(body))
return res, body
def service_publish_failure_region(self, body):
# url = self.base_url + '/api/tenants/services/publish'
        url = self.base_url + '/publish'
        body["status"] = "failure"
logger.info("publish app to ys?{}".format(body["dest_ys"]))
res, body = self._post(url, self.default_headers, json.dumps(body))
return res, body
def service_publish_new_region(self, body):
# url = self.base_url + '/api/tenants/services/publish'
        url = self.base_url + '/publish'
        body["status"] = "pushing"
logger.info("publish app to ys?{}".format(body["dest_ys"]))
res, body = self._post(url, self.default_headers, json.dumps(body))
return res, body
def code_check_region(self, body):
# url = self.base_url + '/api/tenants/services/codecheck'
        url = self.base_url + '/codecheck'
print body
res, body = self._post(url, self.default_headers, body)
return res, body
def update_service_region(self, service_id, body):
        url = self.base_url + '/codecheck/service/{0}'.format(service_id)
res, body = self._put(url, self.default_headers, body)
def update_version_region(self, body):
        url = self.base_url + '/version'
res, body = self._post(url, self.default_headers, body)
    def update_version_event(self, event_id, body):
        url = self.base_url + '/version/event/{0}'.format(event_id)
res, body = self._post(url, self.default_headers, body)

View File

@ -1,72 +0,0 @@
from addict import Dict
from _base import SuperHttpClient
import logging
logger = logging.getLogger('default')
class RegistryAPI(SuperHttpClient):
def __init__(self, conf=None, host=None, *arg, **kwargs):
if conf is not None:
host = conf.host
super(RegistryAPI, self).__init__(host)
self.apitype = 'registry'
self.default_headers = {"Content-Type": "application/json", "Connection": "close"}
self.log_topic = None
def set_log_topic(self, topic):
self.log_topic = topic
def delete_image(self, image):
data = self.parse_image(image)
digest = self.get_manifest_digest(data)
url = self.base_url + '/v2/{0}/manifests/{1}'.format(data.name, digest)
res, body = self._delete(url, headers=self.default_headers)
def get_manifest_digest(self, data):
url = self.base_url + '/v2/{0}/manifests/{1}'.format(data.name, data.tag)
res, body = self._get(url, headers=self.default_headers)
return res['docker-content-digest']
def rename_image(self, image, tag=None):
data = self.parse_image(image)
end_name = '{0}/{1}'.format(self.host, data.name)
if tag is not None:
end_name = '{0}:{1}'.format(end_name, tag)
elif data.tag is not None:
end_name = '{0}:{1}'.format(end_name, data.tag)
return end_name
def exist_image(self, image):
data = self.parse_image(image)
url = self.base_url + '/v2/{0}/manifests/{1}'.format(data.name, data.tag)
try:
res, body = self._get(url, headers=self.default_headers)
is_exist = True
except self.CallApiError, e:
if e.status == 404:
is_exist = False
else:
raise e
if self.log_topic is not None:
logger.info(self.log_topic, "check image {0} is or not exists on {1}, result: {2}".format(image, self.host, is_exist))
return is_exist
def parse_image(self, image):
if '/' in image:
host, full_name = image.split('/', 1)
else:
host, full_name = (None, image)
if ':' in full_name:
name, tag = full_name.split(':', 1)
else:
name, tag = (full_name, 'latest')
return Dict(host=host, name=name, tag=tag)
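
For reference, a sketch of the registry client above, mirroring how the image_manual plugin below constructs it from the "curr_registry" config key (the image name is a placeholder):

from clients.registry import RegistryAPI

registry = RegistryAPI(host='goodrain.me')
registry.set_log_topic('mq_work.image_manual')
# docker.io/library/nginx:1.11 -> goodrain.me/library/nginx:1.11
local_image = registry.rename_image('docker.io/library/nginx:1.11')
if not registry.exist_image(local_image):
    print("image has not been pushed to the local registry yet")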

View File

@ -1,90 +0,0 @@
# -*- coding: utf8 -*-
from pysnmp.entity.rfc3413.oneliner import cmdgen
class SnmpApi(object):
def __init__(self, host, port, version=1):
cmdGen = cmdgen.CommandGenerator()
self._cmd = cmdGen
self._snmp_version = version
if str(version) == '1':
self._auth_data = cmdgen.CommunityData('public', mpModel=0)
elif str(version) == '2c':
self._auth_data = cmdgen.CommunityData('public')
else:
raise ValueError("unsupport version: {0}".format(version))
self._target = cmdgen.UdpTransportTarget((host, port))
self._extra_mib_path = None
def _check_result(self, result):
errorIndication, errorStatus, errorIndex, varBinds = result
if errorIndication:
pass
else:
if errorStatus:
pass
else:
return varBinds
return []
def snmpwalk(self, MibVariable):
if self._extra_mib_path is not None:
MibVariable.addMibSource(self._extra_mib_path)
result = self._cmd.nextCmd(
self._auth_data,
self._target,
MibVariable
)
return self._check_result(result)
class ZxtmPoolStatic(SnmpApi):
def __init__(self, host, port):
SnmpApi.__init__(self, host, port)
def _as_list(self, data_list):
new_data = []
for l in data_list:
name, val = l[0]
item = (name.getOid().prettyPrint().split('.', 14)[-1], val.prettyPrint())
new_data.append(item)
return new_data
def _as_dict(self, data_list):
new_data = {}
for l in data_list:
name, val = l[0]
n, v = name.getOid().prettyPrint().split('.', 14)[-1], val.prettyPrint()
new_data[n] = v
return new_data
def add_mib_source(self, path):
self._extra_mib_path = path
def get_pool_names(self):
mib_variable = cmdgen.MibVariable('ZXTM-MIB', 'poolName')
data = self.snmpwalk(mib_variable)
return self._as_dict(data)
def get_pool_bytes_in_lo(self):
mib_variable = cmdgen.MibVariable('ZXTM-MIB', 'poolBytesInLo')
data = self.snmpwalk(mib_variable)
return self._as_dict(data)
def get_pool_bytes_in_hi(self):
mib_variable = cmdgen.MibVariable('ZXTM-MIB', 'poolBytesInHi')
data = self.snmpwalk(mib_variable)
return self._as_dict(data)
def get_pool_bytes_out_lo(self):
mib_variable = cmdgen.MibVariable('ZXTM-MIB', 'poolBytesOutLo')
data = self.snmpwalk(mib_variable)
return self._as_dict(data)
def get_pool_bytes_out_hi(self):
mib_variable = cmdgen.MibVariable('ZXTM-MIB', 'poolBytesOutHi')
data = self.snmpwalk(mib_variable)
return self._as_dict(data)
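
The *Hi/*Lo getters above return what are conventionally the upper and lower 32-bit words of 64-bit traffic counters, so a full value would be hi * 2**32 + lo. A sketch of combining them; the SNMP host/port, MIB path and module path are assumptions.

from clients.zxtm import ZxtmPoolStatic  # module path assumed

zxtm = ZxtmPoolStatic('10.0.0.10', 161)
zxtm.add_mib_source('/usr/share/snmp/mibs')
names = zxtm.get_pool_names()
lo = zxtm.get_pool_bytes_in_lo()
hi = zxtm.get_pool_bytes_in_hi()
# keys are the OID index suffixes, assumed to be shared by all three tables
bytes_in = dict((idx, int(hi[idx]) * (1 << 32) + int(lo[idx])) for idx in names)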

View File

@ -1,25 +0,0 @@
import json
import logging
from _base import BaseHttpClient
logger = logging.getLogger('default')
class OpentsdbAPI(BaseHttpClient):
def __init__(self, conf, *args, **kwargs):
BaseHttpClient.__init__(self, *args, **kwargs)
self.default_headers = {'Connection': 'keep-alive', 'content-type': 'application/json'}
self.url = 'http://{0}:{1}/api/put'.format(conf.host, conf.port)
def write(self, data):
if isinstance(data, (list, dict)):
data = json.dumps(data)
try:
res, body = self._post(self.url, self.default_headers, data)
return True
except self.CallApiError, e:
logger.exception('client_error', e)
return False

View File

@ -1,42 +0,0 @@
import logging
import os
from _base import BaseHttpClient
logger = logging.getLogger('default')
class UserConsoleAPI(BaseHttpClient):
def __init__(self, conf=None, *args, **kwargs):
super(UserConsoleAPI, self).__init__()
self._name = 'region'
self.default_headers = {"Content-Type": "application/json"}
if conf["token"] is not None:
self.default_headers.update({
"Authorization":
"Token {}".format(conf["token"])
})
if conf is None:
self.base_url = "https://user.goodrain.com"
else:
self.base_url = conf["url"]
def update_service(self, service_id, body):
#todo 127.0.0.1:3333/api/codecheck
url = self.base_url + '/api/services/{0}'.format(service_id)
# url = 'http://127.0.0.1:3228/api/codecheck/{0}'.format(service_id)
res, body = self._put(url, self.default_headers, body)
def code_check(self, body):
#todo 127.0.0.1:3333/api/codecheck
url = self.base_url + '/api/tenants/services/codecheck'
# url = 'http://127.0.0.1:3228/api/codecheck'
res, body = self._post(url, self.default_headers, body)
return res, body
def service_publish_success(self, body):
pass
#url = self.base_url + '/api/tenants/services/publish'
# url = 'http://127.0.0.1:3228/api/publish'
#res, body = self._post(url, self.default_headers, body)
#return res, body

View File

@ -1,168 +0,0 @@
# -*- coding: utf8 -*-
import time
import json
import httplib2
import urllib
import sys
from clients.region import RegionAPI
from clients.userconsole import UserConsoleAPI
from clients.region_api import RegionBackAPI
from utils.shell import Executer
from clients.etcdcli import TaskLocker
from utils.log import EventLog
import logging
import logging.config
from etc import settings
import fileinput
load_dict = {}
with open("plugins/config.json", 'r') as load_f:
load_dict = json.load(load_f)
logging.config.dictConfig(settings.get_logging(load_dict))
logger = logging.getLogger('default')
class CodeCheck():
watching_topics = ('service_event_msg', )
required_configs = ('region', 'userconsole', 'etcd.lock')
def __init__(self, job, *args, **kwargs):
self.job = job
self.configs = kwargs.get("config")
self.user_cs_client = UserConsoleAPI(conf=self.configs['userconsole'])
self.region_client = RegionBackAPI()
task = json.loads(self.job.body)
self.base_dir = kwargs.get('base_dir')
for k in ('tenant_id', 'service_id', 'action'):
setattr(self, k, task[k])
if 'event_id' in task:
self.event_id = task["event_id"]
self.log = EventLog().bind(event_id=self.event_id)
else:
self.event_id = "system"
self.log = EventLog().bind(event_id=self.event_id)
self.task = task
self.locker = TaskLocker(conf=self.configs['etcd'])
# self.waittime = int(task['wait_time'])
self.log.info(u"worker已收到异步任务。", step="worker")
def do_work(self):
logger.info('mq_work.service_event',
"plugin %s execute start" % __name__)
self.log.debug(u"代码检查异步处理开始。", step="worker", status="start")
self.code_check()
logger.info('mq_work.service_event',
"plugin %s execute finished" % __name__)
def code_check(self):
git_url = self.task['git_url']
check_type = self.task['check_type']
code_version = self.task['code_version']
git_project_id = self.task['git_project_id']
code_from = self.task['code_from']
url_repos = self.task['url_repos']
lock_id = 'code_check.' + self.service_id
logger.info(
'mq_work.code_check',
"git_url {0},check_type {1}, code_version {2},git_project_id {3},code_from {4},url_repos {5} ".
format(git_url, check_type, code_version, git_project_id,
code_from, url_repos))
try:
if self.locker.exists(lock_id):
logger.info('mq_work.code_check',
"lock_id {} exists, do nothing".format(lock_id))
self.log.info(
'lock_id {} exists, do nothing'.format(lock_id),
step="check_exist")
return
self.locker.add_lock(lock_id, bytes(git_url))
            self.log.info('add lock_id {}'.format(lock_id), step="check-exist")
except Exception, e:
pass
logger.info('mq_work.code_check', 'added lock <{}> for [{}]'.format(
lock_id, git_url))
logger.info(
'mq_work.code_check',
self.tenant_id + "=" + self.service_id + " start code check")
if self.event_id:
self.log.info(
"代码检测{0},{1} 开始".format(self.tenant_id, self.service_id),
step="check-start")
cmd = '/bin/bash {0}/scripts/detect.sh {1} {2} "{3}" {4}'.format(
self.base_dir, self.tenant_id, self.service_id, git_url,
self.base_dir)
try:
output = Executer.call(cmd)
self.requestConsole(self.service_id, output[0].rstrip('\n'),
check_type, git_url, code_version,
git_project_id, code_from, url_repos)
if self.event_id:
self.log.info("代码检测完成,请重新部署", step="last", status="success")
except Executer.ExecException, e:
logger.info('mq_work.code_check', 'code check failed')
logger.info('mq_work.code_check', e)
logger.info('mq_work.code_check', e.output)
self.log.error(
"代码检测异常 {}".format(e), step="callback", status="failure")
finally:
try:
self.locker.drop_lock(lock_id)
self.locker.release_lock()
except Exception, e:
pass
logger.info('mq_work.code_check',
self.tenant_id + "=" + self.service_id + " end code check")
if self.event_id:
self.log.info(
"代码检测{0},{1} 结束".format(self.tenant_id, self.service_id),
step="check-end")
def requestConsole(self, service_id, condition, check_type, git_url,
code_version, git_project_id, code_from, url_repos):
body = {
"service_id": service_id,
"condition": condition,
"check_type": check_type,
"git_url": git_url,
'code_version': code_version,
'git_project_id': git_project_id,
'code_from': code_from,
"url_repos": url_repos
}
logger.info('mq_work.service_event',
"service_id=" + service_id + ";condition=" + condition)
res, bodyres = self.user_cs_client.code_check(json.dumps(body))
try:
self.region_client.code_check_region(json.dumps(body))
except Exception, e:
pass
def main():
body = ""
for line in fileinput.input(): # read task from stdin
body = line
code_check = CodeCheck(job=Job(body=body), config=load_dict, base_dir=sys.path[0])
code_check.do_work()
class Job():
body = ""
def __init__(self, body, *args, **kwargs):
self.body = body
def get_body(self):
return self.body
def get_task(self):
task = json.loads(self.body)
return task
if __name__ == '__main__':
main()
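
The plugin reads exactly one JSON task from stdin. A sketch of feeding it a task: the field names are the ones read in CodeCheck above, while the values and the script path are placeholders.

import json
import subprocess

task = {
    "tenant_id": "tenant-1",
    "service_id": "service-1",
    "action": "code_check",
    "event_id": "event-1",
    "git_url": "git@example.com:demo/app.git",
    "check_type": "first_check",
    "code_version": "master",
    "git_project_id": 0,
    "code_from": "gitlab_manual",
    "url_repos": "",
}
p = subprocess.Popen(["python", "plugins/code_check.py"], stdin=subprocess.PIPE)
p.communicate(json.dumps(task) + "\n")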

View File

@ -1,59 +0,0 @@
{
"region": {
"url": "http://region.goodrain.me:8888",
"token": ""
},
"publish": {
"slug": {
"slug_path": "/grdata/build/tenant/",
"curr_region_path": "/grdata/build/tenant/",
"curr_region_dir": "app_publish/",
"all_region_ftp": false,
"all_region_ftp_host": "139.196.88.57",
"all_region_ftp_port": "10021",
"all_region_username": "goodrain-admin",
"all_region_password": "goodrain123465",
"all_region_namespace": "app-publish/",
"oss_ftp": true,
"oss_ftp_host": "139.196.88.57",
"oss_ftp_port": "10021",
"oss_username": "goodrain-admin",
"oss_password": "goodrain123465",
"oss_namespace": "app-publish/"
},
"image": {
"curr_registry": "goodrain.me",
"all_region_image": true,
"all_registry": "inner.goodrain.com",
"oss_image": true,
"oss_host": "hub.goodrain.com",
"oss_namespace": "goodrain",
"oss_username": "goodrain-admin",
"oss_password": "goodrain123465",
"oss_cart": "/usr/local/share/ca-certificates/hub.goodrain.com.crt"
}
},
"oss": {
"ali_shanghai": {
"id": "nMscVs3CaIXPEDUd",
"secret": "g4RWmftifuJxqUdqEWc69h0exO2V46",
"endpoint": "oss-cn-shanghai.aliyuncs.com"
}
},
"CLOUD_ASSISTANT": "goodrain",
"DEFAULT_HANDLERS": ["zmq_handler"],
"EVENT_LOG_ADDRESS": "tcp://127.0.0.1:6366",
"etcd": {
"host": "127.0.0.1",
"port": 2379
},
"userconsole": {
"url": "http://console.goodrain.me",
"token": ""
},
"zmq": {
"service_pub": {
"address": "tcp://127.0.0.1:9341"
}
}
}
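
For reference, how the blocks above map onto the clients the plugins construct (this mirrors the plugin __init__ code elsewhere in this commit):

import json

with open("plugins/config.json") as f:
    conf = json.load(f)

region_conf = conf["region"]            # RegionAPI / ACPAPI expect {"url", "token"}
etcd_conf = conf["etcd"]                # TaskLocker expects {"host", "port"}
image_conf = conf["publish"]["image"]   # RegistryAPI(host=curr_registry), HubUtils(image_conf)
userconsole_conf = conf["userconsole"]  # UserConsoleAPI expects {"url", "token"}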

View File

@ -1,62 +0,0 @@
# -*- coding: utf8 -*-
def get_logging(conf):
DEFAULT_HANDLERS = conf.get('DEFAULT_HANDLERS', ["console"])
ZMQ_LOG_ADDRESS = conf["zmq"]['service_pub']["address"]
EVENT_LOG_ADDRESS = conf.get("EVENT_LOG_ADDRESS", "tcp://127.0.0.1:6366")
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {},
'formatters': {
'standard': {
'format': "%(asctime)s [%(levelname)s] localhost [%(funcName)s] %(pathname)s:%(lineno)s %(message)s",
'datefmt': "%Y-%m-%d %H:%M:%S"
},
'zmq_formatter': {
'format': "%(asctime)s [%(levelname)s] %(hostname)s [%(funcName)s] %(pathname)s:%(lineno)s %(message)s",
'datefmt': "%Y-%m-%d %H:%M:%S"
},
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'standard',
},
'zmq_handler': {
'level': 'DEBUG',
'class': 'utils.log.ZmqHandler',
'address': ZMQ_LOG_ADDRESS,
'root_topic': 'labor',
'formatter': 'zmq_formatter',
},
'event_handler': {
'level': 'DEBUG',
'class': 'utils.log.EventHandler',
'address': EVENT_LOG_ADDRESS,
},
},
'loggers': {
'main': {
'handlers': ['console'],
'level': 'DEBUG',
'propagate': True,
},
'default': {
'handlers': DEFAULT_HANDLERS,
'level': 'DEBUG',
'propagate': False,
},
'event': {
'handlers': ['event_handler'],
'level': 'DEBUG',
'propagate': False,
},
},
}
return LOGGING

View File

@ -1,284 +0,0 @@
# -*- coding: utf8 -*-
import os
import json
from utils.shell import Executer as shell
from clients.region import RegionAPI
from clients.registry import RegistryAPI
from clients.etcdcli import TaskLocker
from clients.userconsole import UserConsoleAPI
from clients.acp_api import ACPAPI
from clients.region_api import RegionBackAPI
import time
from utils.log import EventLog
import logging
import logging.config
from etc import settings
import fileinput
load_dict = {}
with open("plugins/config.json", 'r') as load_f:
load_dict = json.load(load_f)
logging.config.dictConfig(settings.get_logging(load_dict))
logger = logging.getLogger('default')
if os.access("/var/run/docker.sock", os.W_OK):
DOCKER_BIN = "docker"
else:
DOCKER_BIN = "sudo -P docker"
class ImageManual():
def __init__(self, job, *args, **kwargs):
self.job = job
self.configs = kwargs.get("config")
self.region_api = RegionAPI(conf=self.configs['region'])
image_config = self.configs["publish"]["image"]
self.region_client = RegionBackAPI()
self.region_registry = RegistryAPI(
host=image_config.get('curr_registry'))
# self.region_registry.set_log_topic('mq_work.image_manual')
self.oss_registry = RegistryAPI(host=image_config.get('all_registry'))
self.oss_registry.set_log_topic('mq_work.image_manual')
self.locker = TaskLocker(conf=self.configs['etcd'])
self.api = ACPAPI(conf=self.configs['region'])
self.namespace = image_config.get('oss_namespace')
self.user_cs_client = UserConsoleAPI(conf=self.configs['userconsole'])
def do_work(self):
try:
task = json.loads(self.job.body)
self.task = task
if "event_id" in self.task:
self.event_id = task["event_id"]
self.log = EventLog().bind(
event_id=self.event_id, step="image_manual")
else:
self.event_id = ""
self.log = EventLog().bind(event_id="", step="image_manual")
logger.info("mq_work.image_manual",
"new image_manual task: {}".format(task))
if task['action'] == 'create_new_version':
self.log.info("开始升级应用。")
self.create_new_version()
elif task['action'] == 'download_and_deploy':
self.log.info("开始下载镜像并部署应用。")
self.download_and_deploy()
elif task['action'] == 'delete_old_version':
self.log.info("开始删除旧版本。")
self.delete_old_version()
except Exception as e:
if self.log:
self.log.error(
"从自定义镜像部署应用失败。{}".format(e.message),
step="callback",
status="failure")
logger.exception('mq_work.image_manual', e)
def create_new_version(self):
logger.debug("mq_work.image_manual",
"now create new version and upload image")
def delete_old_version(self):
logger.debug("mq_work.image_manual", "now delete old version")
def download_and_deploy(self):
image = self.task['image']
# namespace = self.task['namespace']
tenant_name = self.task['tenant_name']
service_alias = self.task['service_alias']
event_id = self.task['event_id']
service_alias = self.task.get("service_alias", None)
has_download = False
inner_image = self.oss_registry.rename_image(image)
inner_image = "{0}_{1}".format(inner_image, service_alias)
local_image = self.region_registry.rename_image(image)
local_image = "{0}_{1}".format(local_image, service_alias)
        # Pull the docker image directly
try:
self.log.info("开始下载镜像:{0}".format(image))
pull_result = self._pull(image)
if pull_result:
# image_id = self.get_image_property(image, 'Id')
self._tag(image, local_image)
self.log.info("修改镜像名为:{0}".format(local_image))
ok = self._push(local_image)
if not ok:
self.log.error(
"上传镜像发生错误,重试失败,退出。", step="callback", status="failure")
return
self.log.info("镜像推送到本地仓库完成。")
# self._tag(image, inner_image)
# self._push(inner_image)
has_download = True
else:
self.log.error("下载镜像发生错误。", step="callback", status="failure")
logger.error("mq_work.image_manual",
"download image failed! image:{}".format(image))
except Exception as e:
self.log.error(
"镜像操作发生错误。{0}".format(e.__str__()),
step="callback",
status="failure")
logger.exception("mq_work.image_manual", e)
version_status = {
"final_status":"failure",
}
if has_download:
self.log.info("应用同步完成。", step="app-image", status="success")
version_body = {
"type": 'image',
"path": local_image,
"event_id": self.event_id
}
body = {
"deploy_version": self.task['deploy_version'],
"event_id": self.event_id,
}
version_status['final_status'] = "success"
try:
self.region_client.update_version_region(json.dumps(version_body))
self.region_client.update_version_event(self.event_id,json.dumps(version_status))
except Exception as e:
pass
try:
self.api.update_iamge(tenant_name, service_alias, local_image)
version = self.task['deploy_version']
self.log.info("应用信息更新完成,开始启动应用。", step="app-image", status="success")
self.api.upgrade_service(self.task['tenant_name'], self.task['service_alias'], json.dumps(body))
# self.api.start_service(tenant_name, service_alias, event_id)
except Exception as e:
logger.exception(e)
self.log.error(
"应用自动启动失败。请手动启动", step="callback", status="failure")
else:
try:
self.region_client.update_version_event(self.event_id,json.dumps(version_status))
except Exception as e:
pass
self.log.error("应用同步失败。", step="callback", status="failure")
def queryServiceStatus(self, service_id):
try:
res, body = self.region_api.is_service_running(service_id)
logger.info(
'mq_work.image_manual',
"service_id=" + service_id + ";body=" + json.dumps(body))
status = body.get(service_id, "closed")
if status == "running":
self.log.debug("依赖的应用状态已经为运行中。", step="worker")
return True
except:
pass
self.log.debug("依赖的应用状态不是运行中,本应用稍后启动。", step="worker")
return False
def get_image_property(self, image, name):
query_format = '{{.%s}}' % name
try:
output = shell.call("{2} inspect -f '{0}' {1}".format(
query_format, image, DOCKER_BIN))
if output == '<no value>':
return None
else:
return output[0].rstrip('\n')
except shell.ExecException, e:
logger.exception("mq_work.image_manual", e)
return None
def update_publish_event(self, **kwargs):
body = json.dumps(kwargs)
try:
self.region_api.update_event(body)
except Exception, e:
logger.exception("mq_work.image_manual", e)
def _pull(self, image):
cmd = "{0} pull {1}".format(DOCKER_BIN, image)
retry = 2
while retry:
try:
p = shell.start(cmd)
while p.is_running():
line = p.readline()
self.log.debug(
line.rstrip('\n').lstrip('\x1b[1G'), step="pull-image")
for line in p.unread_lines:
self.log.debug(line, step="pull-image")
if p.exit_with_err():
self.log.error(
"拉取镜像失败。" + ("开始进行重试." if retry > 0 else ""),
step="pull-image",
status="failure")
retry -= 1
continue
return True
except shell.ExecException, e:
self.log.error("下载镜像发生错误。{}" + ("开始进行重试." if retry > 0 else
"").format(e.message))
retry -= 1
return False
def _push(self, image):
cmd = "{0} push {1}".format(DOCKER_BIN, image)
logger.info("mq_work.image_manual", cmd)
retry = 2
while retry:
try:
p = shell.start(cmd)
while p.is_running():
line = p.readline()
self.log.debug(
line.rstrip('\n').lstrip('\x1b[1G'), step="push-image")
for line in p.unread_lines:
self.log.debug(line, step="push-image")
if p.exit_with_err():
self.log.error(
"上传镜像失败。" + ("开始进行重试." if retry > 0 else ""),
step="push-image",
status="failure")
retry -= 1
continue
return True
except shell.ExecException, e:
self.log.error("上传镜像发生错误。{}" + ("开始进行重试." if retry > 0 else
"").format(e.message))
logger.error(e)
retry -= 1
return False
def _tag(self, image_id, image):
cmd = "{2} tag {0} {1}".format(image_id, image, DOCKER_BIN)
logger.info("mq_work.image_manual", cmd)
shell.call(cmd)
def splitChild(self, childs):
data = []
for lock_event_id in childs:
data.append(lock_event_id.split("/")[-1])
return data
def main():
body = ""
for line in fileinput.input(): # read task from stdin
body = line
image_manual = ImageManual(config=load_dict, job=Job(body=body))
image_manual.do_work()
class Job():
body = ""
def __init__(self, body, *args, **kwargs):
self.body = body
def get_body(self):
return self.body
def get_task(self):
task = json.loads(self.body)
return task
if __name__ == '__main__':
main()
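
Like code_check, this plugin reads one JSON task from stdin. A sketch of a download_and_deploy payload: the field names are those read in ImageManual.download_and_deploy, and all values are placeholders.

import json

task = {
    "action": "download_and_deploy",
    "image": "docker.io/library/nginx:1.11",
    "tenant_name": "tenant-1",
    "service_alias": "grdemo",
    "event_id": "event-2",
    "deploy_version": "20180324000000",
}
print(json.dumps(task))  # pipe this line into the plugin's stdin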

View File

@ -1,7 +0,0 @@
[template]
src = "env.tmpl"
dest = "/etc/profile.d/extend_env.sh"
mode = "0644"
keys = [
"/"
]

View File

@ -1,8 +0,0 @@
{{if ls "/goodrain/TENANT_ID/services/SERVICE_ID/dependency" }}
{{range gets "/goodrain/TENANT_ID/services/SERVICE_ID/dependency/*"}}
{{$data := json .Value}}
{{range $key,$value := $data}}
export {{$key}}={{$value}}
{{end}}
{{end}}
{{end}}

View File

@ -1,74 +0,0 @@
#!/bin/bash
_TYPE="_type_"
_ENTRYPOINT=`cat <<EOF
_entrypoint_
EOF`
_CMD=`cat <<EOF
_cmd_
EOF`
CONTAINER_CMD=""
if [[ $_TYPE == 'ENTRYPOINT' ]];then
if [ $# -gt 0 ];then
CONTAINER_CMD="$_ENTRYPOINT $@"
else
if [[ $_CMD != '_cmd_' ]];then
CONTAINER_CMD="$_ENTRYPOINT $_CMD"
else
CONTAINER_CMD=$_ENTRYPOINT
fi
fi
elif [[ $_TYPE == 'CMD' ]];then
if [ $# -gt 0 ];then
CONTAINER_CMD=$@
else
CONTAINER_CMD=$_CMD
fi
fi
# sed -i -e "s/TENANT_ID/$TENANT_ID/g" -e "s/SERVICE_ID/$SERVICE_ID/g" /etc/confd/templates/env.tmpl
# /opt/bin/confd -node=172.30.42.1:4001 -onetime
wait_pipe=$(mktemp -t "heroku.waitpipe.XXXXXX" -u)
rm -f $wait_pipe
mkfifo $wait_pipe
exec 3<> $wait_pipe
pids=()
trap 'trap - QUIT TERM EXIT; echo "Going down, terminating child processes..." >&2; rm -f ${wait_pipe} || true; echo ${pids[@]}; kill -TERM "${pids[@]}" 2> /dev/null || true; exit' QUIT TERM EXIT
if [[ -t 1 ]]; then
trap 'trap - INT; kill -INT $$; exit' INT;
else
trap '' INT;
fi
if [ -z $NO_GRPROXY ]; then
echo "Starting gr-listener..." >&2
(
trap 'echo "gr-listener" >&3;' EXIT
trap 'kill -TERM $! 2>/dev/null' TERM
/opt/bin/gr-listener -url=http://region.goodrain.me:8888/v1/conf/$TENANT_ID/$SERVICE_ID -wait_time=60 -frequency=once 2>&1 &
wait
) & pids+=($!)
fi
echo "Starting user process: $CONTAINER_CMD ..." >&2
(
trap 'echo "$CONTAINER_CMD" >&3;' EXIT
trap 'kill -TERM $! 2> /dev/null' TERM
$CONTAINER_CMD 2>&1 &
wait
) & pids+=($!)
# wait for something to come from the FIFO attached to FD 3, which means that the given process was killed or has failed
# this will be interrupted by a SIGTERM or SIGINT in the traps further up
# if the pipe unblocks and this executes, then we won't read it again, so if the traps further up kill the remaining subshells above, their writing to FD 3 will have no effect
read exitproc <&3
# we'll only reach this if one of the processes above has terminated
echo "Process exited unexpectedly: $exitproc" >&2
# this will trigger the EXIT trap further up and kill all remaining children
exit 1

View File

@ -1,30 +0,0 @@
# -*- coding: utf-8 -*-
'''
Created on 2012-6-29
@author: lijie.ma
'''
from aliyun.api.base import sign
class appinfo(object):
def __init__(self,accessKeyId,accessKeySecret):
self.accessKeyId = accessKeyId
self.accessKeySecret = accessKeySecret
def getDefaultAppInfo():
pass
def setDefaultAppInfo(accessKeyId,accessKeySecret):
default = appinfo(accessKeyId,accessKeySecret)
global getDefaultAppInfo
getDefaultAppInfo = lambda: default

View File

@ -1,3 +0,0 @@
# -*- coding: utf-8 -*-
from aliyun.api.rest import *
from aliyun.api.base import FileItem

View File

@ -1,258 +0,0 @@
# -*- coding: utf-8 -*-
'''
Created on 2012-7-3
@author: lijie.ma
'''
try: import httplib
except ImportError:
import http.client as httplib
import sys
import urllib
import time
import json
import aliyun
import itertools
import mimetypes
import base64
import hmac
import uuid
from hashlib import sha1
def sign(accessKeySecret, parameters):
#===========================================================================
    # '''Signing helper
    # @param secret: the secret key used for signing
    # @param parameters: accepts either a dict or a string
# '''
#===========================================================================
    # When parameters is a dict-like object
sortedParameters = sorted(parameters.items(), key=lambda parameters: parameters[0])
canonicalizedQueryString = ''
for (k,v) in sortedParameters:
canonicalizedQueryString += '&' + percent_encode(k) + '=' + percent_encode(v)
stringToSign = 'POST&%2F&' + percent_encode(canonicalizedQueryString[1:])
h = hmac.new(accessKeySecret + "&", stringToSign, sha1)
signature = base64.encodestring(h.digest()).strip()
return signature
def percent_encode(encodeStr):
encodeStr = str(encodeStr)
res = urllib.quote(encodeStr.decode(sys.stdin.encoding).encode('utf8'), '')
res = res.replace('+', '%20')
res = res.replace('*', '%2A')
res = res.replace('%7E', '~')
return res
def mixStr(pstr):
if(isinstance(pstr, str)):
return pstr
elif(isinstance(pstr, unicode)):
return pstr.encode('utf-8')
else:
return str(pstr)
class FileItem(object):
def __init__(self,filename=None,content=None):
self.filename = filename
self.content = content
class MultiPartForm(object):
"""Accumulate the data to be used when posting a form."""
def __init__(self):
self.form_fields = []
self.files = []
self.boundary = "PYTHON_SDK_BOUNDARY"
return
def get_content_type(self):
return 'multipart/form-data; boundary=%s' % self.boundary
def add_field(self, name, value):
"""Add a simple field to the form data."""
self.form_fields.append((name, str(value)))
return
def add_file(self, fieldname, filename, fileHandle, mimetype=None):
"""Add a file to be uploaded."""
body = fileHandle.read()
if mimetype is None:
mimetype = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
self.files.append((mixStr(fieldname), mixStr(filename), mixStr(mimetype), mixStr(body)))
return
def __str__(self):
"""Return a string representing the form data, including attached files."""
# Build a list of lists, each containing "lines" of the
# request. Each part is separated by a boundary string.
# Once the list is built, return a string where each
# line is separated by '\r\n'.
parts = []
part_boundary = '--' + self.boundary
# Add the form fields
parts.extend(
[ part_boundary,
'Content-Disposition: form-data; name="%s"' % name,
'Content-Type: text/plain; charset=UTF-8',
'',
value,
]
for name, value in self.form_fields
)
# Add the files to upload
parts.extend(
[ part_boundary,
'Content-Disposition: file; name="%s"; filename="%s"' % \
(field_name, filename),
'Content-Type: %s' % content_type,
'Content-Transfer-Encoding: binary',
'',
body,
]
for field_name, filename, content_type, body in self.files
)
# Flatten the list and add closing boundary marker,
# then return CR+LF separated data
flattened = list(itertools.chain(*parts))
flattened.append('--' + self.boundary + '--')
flattened.append('')
return '\r\n'.join(flattened)
class AliyunException(Exception):
#===========================================================================
    # Business-level exception
#===========================================================================
def __init__(self):
self.code = None
self.message = None
self.host = None
self.requestId = None
def __str__(self, *args, **kwargs):
sb = "code=" + mixStr(self.code) +\
" message=" + mixStr(self.message) +\
" host=" + mixStr(self.host) +\
" requestId=" + mixStr(self.requestId)
return sb
class RequestException(Exception):
#===========================================================================
    # Request/connection exception
#===========================================================================
pass
class RestApi(object):
#===========================================================================
    # Base class for the REST APIs
#===========================================================================
def __init__(self, domain, port = 80):
#=======================================================================
        # Initialize the base class
        # Args @param domain: the domain name or IP to request
        # @param port: the port to request
#=======================================================================
self.__domain = domain
self.__port = port
self.__httpmethod = "POST"
if(aliyun.getDefaultAppInfo()):
self.__access_key_id = aliyun.getDefaultAppInfo().accessKeyId
self.__access_key_secret = aliyun.getDefaultAppInfo().accessKeySecret
def get_request_header(self):
return {
'Content-type': 'application/x-www-form-urlencoded',
"Cache-Control": "no-cache",
"Connection": "Keep-Alive",
}
def set_app_info(self, appinfo):
#=======================================================================
        # Set the app credentials used for requests
# @param appinfo: import aliyun
# appinfo aliyun.appinfo(accessKeyId,accessKeySecret)
#=======================================================================
self.__access_key_id = appinfo.accessKeyId
self.__access_key_secret = appinfo.accessKeySecret
def getapiname(self):
return ""
def getMultipartParas(self):
return [];
def getTranslateParas(self):
return {};
def _check_requst(self):
pass
def getResponse(self, authrize=None, timeout=30):
#=======================================================================
        # Send the request and return the decoded response
#=======================================================================
connection = httplib.HTTPConnection(self.__domain, self.__port, timeout)
timestamp = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
apiname_split = self.getapiname().split(".")
parameters = { \
'Format' : 'json', \
'Version' : apiname_split[4], \
'Action' : apiname_split[3], \
'AccessKeyId' : self.__access_key_id, \
'SignatureVersion' : '1.0', \
'SignatureMethod' : 'HMAC-SHA1', \
'SignatureNonce' : str(uuid.uuid1()), \
'TimeStamp' : timestamp, \
'partner_id' : '1.0',\
}
application_parameter = self.getApplicationParameters()
for key in application_parameter.keys():
parameters[key] = application_parameter[key]
signature = sign(self.__access_key_secret,parameters)
parameters['Signature'] = signature
url = "/?" + urllib.urlencode(parameters)
connection.connect()
header = self.get_request_header();
if(self.getMultipartParas()):
form = MultiPartForm()
for key in self.getMultipartParas():
fileitem = getattr(self,key)
if(fileitem and isinstance(fileitem,FileItem)):
form.add_file(key,fileitem.filename,fileitem.content)
body = str(form)
header['Content-type'] = form.get_content_type()
else:
body = None
connection.request(self.__httpmethod, url, body=body, headers=header)
response = connection.getresponse();
result = response.read()
jsonobj = json.loads(result)
return jsonobj
def getApplicationParameters(self):
application_parameter = {}
for key, value in self.__dict__.iteritems():
if not key.startswith("__") and not key in self.getMultipartParas() and not key.startswith("_RestApi__") and value is not None :
if(key.startswith("_")):
application_parameter[key[1:]] = value
else:
application_parameter[key] = value
        # Consult the translation dict to work around reserved keyword attribute names
translate_parameter = self.getTranslateParas()
for key, value in application_parameter.iteritems():
if key in translate_parameter:
application_parameter[translate_parameter[key]] = application_parameter[key]
del application_parameter[key]
return application_parameter
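
For reference, a sketch of how the generated request classes below are used with this base class (credentials and region are placeholders; this mirrors the AliyunAPI wrapper earlier in the commit):

import aliyun
import aliyun.api

aliyun.setDefaultAppInfo('ACCESS_KEY_ID', 'ACCESS_KEY_SECRET')
req = aliyun.api.Ecs20140526DescribeInstancesRequest()
req.RegionId = 'cn-shanghai'
response = req.getResponse()  # signs the request and returns the decoded JSON body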

View File

@ -1,13 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Bss20140714SetResourceBusinessStatusRequest(RestApi):
def __init__(self,domain='bss.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.BusinessStatus = None
self.ResourceId = None
self.ResourceType = None
def getapiname(self):
return 'bss.aliyuncs.com.SetResourceBusinessStatus.2014-07-14'

View File

@ -1,13 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Cdn20141111DescribeCdnMonitorDataRequest(RestApi):
def __init__(self,domain='cdn.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.DomainName = None
self.EndTime = None
self.StartTime = None
def getapiname(self):
return 'cdn.aliyuncs.com.DescribeCdnMonitorData.2014-11-11'

View File

@ -1,10 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Cdn20141111DescribeCdnServiceRequest(RestApi):
def __init__(self,domain='cdn.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
def getapiname(self):
return 'cdn.aliyuncs.com.DescribeCdnService.2014-11-11'

View File

@ -1,14 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Cdn20141111DescribeRefreshTasksRequest(RestApi):
def __init__(self,domain='cdn.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.ObjectPath = None
self.PageNumber = None
self.PageSize = None
self.TaskId = None
def getapiname(self):
return 'cdn.aliyuncs.com.DescribeRefreshTasks.2014-11-11'

View File

@ -1,12 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Cdn20141111DescribeUserDomainsRequest(RestApi):
def __init__(self,domain='cdn.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.PageNumber = None
self.PageSize = None
def getapiname(self):
return 'cdn.aliyuncs.com.DescribeUserDomains.2014-11-11'

View File

@ -1,11 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Cdn20141111ModifyCdnServiceRequest(RestApi):
def __init__(self,domain='cdn.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.InternetChargeType = None
def getapiname(self):
return 'cdn.aliyuncs.com.ModifyCdnService.2014-11-11'

View File

@ -1,11 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Cdn20141111OpenCdnServiceRequest(RestApi):
def __init__(self,domain='cdn.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.InternetChargeType = None
def getapiname(self):
return 'cdn.aliyuncs.com.OpenCdnService.2014-11-11'

View File

@ -1,12 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Cdn20141111RefreshObjectCachesRequest(RestApi):
def __init__(self,domain='cdn.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.ObjectPath = None
self.ObjectType = None
def getapiname(self):
return 'cdn.aliyuncs.com.RefreshObjectCaches.2014-11-11'

View File

@ -1,17 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Dns20150109AddDomainRecordRequest(RestApi):
def __init__(self,domain='dns.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.DomainName = None
self.Line = None
self.Priority = None
self.RR = None
self.TTL = None
self.Type = None
self.Value = None
def getapiname(self):
return 'dns.aliyuncs.com.AddDomainRecord.2015-01-09'

View File

@ -1,12 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Dns20150109AddDomainRequest(RestApi):
def __init__(self,domain='dns.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.desc = None
self.domainName = None
def getapiname(self):
return 'dns.aliyuncs.com.AddDomain.2015-01-09'

View File

@ -1,11 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Dns20150109ApplyForRetrievalDomainNameRequest(RestApi):
def __init__(self,domain='dns.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.domainName = None
def getapiname(self):
return 'dns.aliyuncs.com.ApplyForRetrievalDomainName.2015-01-09'

View File

@ -1,11 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Dns20150109DeleteDomainRecordRequest(RestApi):
def __init__(self,domain='dns.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.RecordId = None
def getapiname(self):
return 'dns.aliyuncs.com.DeleteDomainRecord.2015-01-09'

View File

@ -1,11 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Dns20150109DeleteDomainRequest(RestApi):
def __init__(self,domain='dns.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.domainName = None
def getapiname(self):
return 'dns.aliyuncs.com.DeleteDomain.2015-01-09'

View File

@ -1,11 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Dns20150109DescribeDomainInfoRequest(RestApi):
def __init__(self,domain='dns.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.domainName = None
def getapiname(self):
return 'dns.aliyuncs.com.DescribeDomainInfo.2015-01-09'

View File

@ -1,11 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Dns20150109DescribeDomainRecordInfoRequest(RestApi):
def __init__(self,domain='dns.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.RecordId = None
def getapiname(self):
return 'dns.aliyuncs.com.DescribeDomainRecordInfo.2015-01-09'

View File

@ -1,16 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Dns20150109DescribeDomainRecordsRequest(RestApi):
def __init__(self,domain='dns.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.DomainName = None
self.PageNumber = None
self.PageSize = None
self.RRKeyWord = None
self.TypeKeyWord = None
self.ValueKeyWord = None
def getapiname(self):
return 'dns.aliyuncs.com.DescribeDomainRecords.2015-01-09'

View File

@ -1,11 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Dns20150109DescribeDomainWhoisInfoRequest(RestApi):
def __init__(self,domain='dns.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.DomainName = None
def getapiname(self):
return 'dns.aliyuncs.com.DescribeDomainWhoisInfo.2015-01-09'

View File

@ -1,13 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Dns20150109DescribeDomainsRequest(RestApi):
def __init__(self,domain='dns.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.KeyWord = None
self.PageSize = None
self.pageNumber = None
def getapiname(self):
return 'dns.aliyuncs.com.DescribeDomains.2015-01-09'

View File

@ -1,11 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Dns20150109RetrievalDomainNameRequest(RestApi):
def __init__(self,domain='dns.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.domainName = None
def getapiname(self):
return 'dns.aliyuncs.com.RetrievalDomainName.2015-01-09'

View File

@ -1,12 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Dns20150109SetDomainRecordStatusRequest(RestApi):
def __init__(self,domain='dns.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.RecordId = None
self.Status = None
def getapiname(self):
return 'dns.aliyuncs.com.SetDomainRecordStatus.2015-01-09'

View File

@ -1,17 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Dns20150109UpdateDomainRecordRequest(RestApi):
def __init__(self,domain='dns.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.Line = None
self.Priority = None
self.RR = None
self.RecordId = None
self.TTL = None
self.Type = None
self.Value = None
def getapiname(self):
return 'dns.aliyuncs.com.UpdateDomainRecord.2015-01-09'

View File

@ -1,14 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110AddDiskRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.ClientToken = None
self.InstanceId = None
self.Size = None
self.SnapshotId = None
def getapiname(self):
return 'ecs.aliyuncs.com.AddDisk.2013-01-10'

View File

@ -1,13 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110AddIpRangeRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.IpAddress = None
self.RegionId = None
self.ZoneId = None
def getapiname(self):
return 'ecs.aliyuncs.com.AddIpRange.2013-01-10'

View File

@ -1,13 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110AllocatePublicIpAddressRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.InstanceId = None
self.IpAddress = None
self.VlanId = None
def getapiname(self):
return 'ecs.aliyuncs.com.AllocatePublicIpAddress.2013-01-10'

View File

@ -1,18 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110AuthorizeSecurityGroupRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.IpProtocol = None
self.NicType = None
self.Policy = None
self.PortRange = None
self.RegionId = None
self.SecurityGroupId = None
self.SourceCidrIp = None
self.SourceGroupId = None
def getapiname(self):
return 'ecs.aliyuncs.com.AuthorizeSecurityGroup.2013-01-10'

View File

@ -1,15 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110CreateImageRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.ClientToken = None
self.Description = None
self.ImageVersion = None
self.RegionId = None
self.SnapshotId = None
def getapiname(self):
return 'ecs.aliyuncs.com.CreateImage.2013-01-10'

View File

@ -1,41 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110CreateInstanceRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.ClientToken = None
self.DataDisk_1_Category = None
self.DataDisk_1_Size = None
self.DataDisk_1_SnapshotId = None
self.DataDisk_2_Category = None
self.DataDisk_2_Size = None
self.DataDisk_2_SnapshotId = None
self.DataDisk_3_Category = None
self.DataDisk_3_Size = None
self.DataDisk_3_SnapshotId = None
self.DataDisk_4_Category = None
self.DataDisk_4_Size = None
self.DataDisk_4_SnapshotId = None
self.HostName = None
self.ImageId = None
self.InnerIpAddress = None
self.InstanceName = None
self.InstanceType = None
self.InternetChargeType = None
self.InternetMaxBandwidthIn = None
self.InternetMaxBandwidthOut = None
self.NodeControllerId = None
self.Password = None
self.RegionId = None
self.SecurityGroupId = None
self.SystemDisk_Category = None
self.VlanId = None
self.ZoneId = None
def getapiname(self):
return 'ecs.aliyuncs.com.CreateInstance.2013-01-10'
def getTranslateParas(self):
return {
    'DataDisk_1_Category': 'DataDisk.1.Category', 'DataDisk_1_Size': 'DataDisk.1.Size', 'DataDisk_1_SnapshotId': 'DataDisk.1.SnapshotId',
    'DataDisk_2_Category': 'DataDisk.2.Category', 'DataDisk_2_Size': 'DataDisk.2.Size', 'DataDisk_2_SnapshotId': 'DataDisk.2.SnapshotId',
    'DataDisk_3_Category': 'DataDisk.3.Category', 'DataDisk_3_Size': 'DataDisk.3.Size', 'DataDisk_3_SnapshotId': 'DataDisk.3.SnapshotId',
    'DataDisk_4_Category': 'DataDisk.4.Category', 'DataDisk_4_Size': 'DataDisk.4.Size', 'DataDisk_4_SnapshotId': 'DataDisk.4.SnapshotId',
    'SystemDisk_Category': 'SystemDisk.Category',
}
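
CreateInstance is the only request shown in this excerpt that overrides getTranslateParas(): the map exists because '.' cannot appear in a Python attribute name, and getApplicationParameters() in the base-class fragment at the top of this diff renames the underscored attributes back to their dotted wire form before signing. A short illustration with placeholder values:

# Illustration only; all values are placeholders.
req = Ecs20130110CreateInstanceRequest()
req.RegionId = 'cn-hangzhou'
req.ImageId = '<image-id>'
req.InstanceType = '<instance-type>'
req.DataDisk_1_Category = 'cloud'    # sent on the wire as DataDisk.1.Category
req.DataDisk_1_Size = 100            # sent on the wire as DataDisk.1.Size

params = req.getApplicationParameters()
# params now holds 'DataDisk.1.Category' and 'DataDisk.1.Size';
# the underscored attribute names have been removed.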

View File

@ -1,13 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110CreateSecurityGroupRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.ClientToken = None
self.Description = None
self.RegionId = None
def getapiname(self):
return 'ecs.aliyuncs.com.CreateSecurityGroup.2013-01-10'

View File

@ -1,14 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110CreateSnapshotRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.ClientToken = None
self.DiskId = None
self.InstanceId = None
self.SnapshotName = None
def getapiname(self):
return 'ecs.aliyuncs.com.CreateSnapshot.2013-01-10'

View File

@ -1,12 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110DeleteDiskRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.DiskId = None
self.InstanceId = None
def getapiname(self):
return 'ecs.aliyuncs.com.DeleteDisk.2013-01-10'

View File

@ -1,12 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110DeleteImageRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.ImageId = None
self.RegionId = None
def getapiname(self):
return 'ecs.aliyuncs.com.DeleteImage.2013-01-10'

View File

@ -1,11 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110DeleteInstanceRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.InstanceId = None
def getapiname(self):
return 'ecs.aliyuncs.com.DeleteInstance.2013-01-10'

View File

@ -1,12 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110DeleteSecurityGroupRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.RegionId = None
self.SecurityGroupId = None
def getapiname(self):
return 'ecs.aliyuncs.com.DeleteSecurityGroup.2013-01-10'

View File

@ -1,13 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110DeleteSnapshotRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.DiskId = None
self.InstanceId = None
self.SnapshotId = None
def getapiname(self):
return 'ecs.aliyuncs.com.DeleteSnapshot.2013-01-10'

View File

@ -1,15 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110DescribeImagesRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.ImageId = None
self.ImageOwnerAlias = None
self.PageNumber = None
self.PageSize = None
self.RegionId = None
def getapiname(self):
return 'ecs.aliyuncs.com.DescribeImages.2013-01-10'

View File

@ -1,11 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110DescribeInstanceAttributeRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.InstanceId = None
def getapiname(self):
return 'ecs.aliyuncs.com.DescribeInstanceAttribute.2013-01-10'

View File

@ -1,11 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110DescribeInstanceDisksRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.InstanceId = None
def getapiname(self):
return 'ecs.aliyuncs.com.DescribeInstanceDisks.2013-01-10'

View File

@ -1,14 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110DescribeInstanceStatusRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.PageNumber = None
self.PageSize = None
self.RegionId = None
self.ZoneId = None
def getapiname(self):
return 'ecs.aliyuncs.com.DescribeInstanceStatus.2013-01-10'

View File

@ -1,10 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110DescribeInstanceTypesRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
def getapiname(self):
return 'ecs.aliyuncs.com.DescribeInstanceTypes.2013-01-10'

View File

@ -1,10 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110DescribeRegionsRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
def getapiname(self):
return 'ecs.aliyuncs.com.DescribeRegions.2013-01-10'

View File

@ -1,13 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110DescribeSecurityGroupAttributeRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.NicType = None
self.RegionId = None
self.SecurityGroupId = None
def getapiname(self):
return 'ecs.aliyuncs.com.DescribeSecurityGroupAttribute.2013-01-10'

View File

@ -1,13 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110DescribeSecurityGroupsRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.PageNumber = None
self.PageSize = None
self.RegionId = None
def getapiname(self):
return 'ecs.aliyuncs.com.DescribeSecurityGroups.2013-01-10'

View File

@ -1,12 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110DescribeSnapshotAttributeRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.RegionId = None
self.SnapshotId = None
def getapiname(self):
return 'ecs.aliyuncs.com.DescribeSnapshotAttribute.2013-01-10'

View File

@ -1,12 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110DescribeSnapshotsRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.DiskId = None
self.InstanceId = None
def getapiname(self):
return 'ecs.aliyuncs.com.DescribeSnapshots.2013-01-10'

View File

@ -1,11 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110DescribeZonesRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.RegionId = None
def getapiname(self):
return 'ecs.aliyuncs.com.DescribeZones.2013-01-10'

View File

@ -1,15 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110GetMonitorDataRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.InstanceId = None
self.PageNumber = None
self.PageSize = None
self.RegionId = None
self.Time = None
def getapiname(self):
return 'ecs.aliyuncs.com.GetMonitorData.2013-01-10'

View File

@ -1,12 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110JoinSecurityGroupRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.InstanceId = None
self.SecurityGroupId = None
def getapiname(self):
return 'ecs.aliyuncs.com.JoinSecurityGroup.2013-01-10'

View File

@ -1,12 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110LeaveSecurityGroupRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.InstanceId = None
self.SecurityGroupId = None
def getapiname(self):
return 'ecs.aliyuncs.com.LeaveSecurityGroup.2013-01-10'

View File

@ -1,15 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110ModifyInstanceAttributeRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.HostName = None
self.InstanceId = None
self.InstanceName = None
self.Password = None
self.SecurityGroupId = None
def getapiname(self):
return 'ecs.aliyuncs.com.ModifyInstanceAttribute.2013-01-10'

View File

@ -1,12 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110RebootInstanceRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.ForceStop = None
self.InstanceId = None
def getapiname(self):
return 'ecs.aliyuncs.com.RebootInstance.2013-01-10'

View File

@ -1,12 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110ReleasePublicIpAddressRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.InstanceId = None
self.PublicIpAddress = None
def getapiname(self):
return 'ecs.aliyuncs.com.ReleasePublicIpAddress.2013-01-10'

View File

@ -1,13 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110ResetDiskRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.DiskId = None
self.InstanceId = None
self.SnapshotId = None
def getapiname(self):
return 'ecs.aliyuncs.com.ResetDisk.2013-01-10'

View File

@ -1,13 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110ResetInstanceRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.DiskType = None
self.ImageId = None
self.InstanceId = None
def getapiname(self):
return 'ecs.aliyuncs.com.ResetInstance.2013-01-10'

View File

@ -1,18 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110RevokeSecurityGroupRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.IpProtocol = None
self.NicType = None
self.Policy = None
self.PortRange = None
self.RegionId = None
self.SecurityGroupId = None
self.SourceCidrIp = None
self.SourceGroupId = None
def getapiname(self):
return 'ecs.aliyuncs.com.RevokeSecurityGroup.2013-01-10'

View File

@ -1,11 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110StartInstanceRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.InstanceId = None
def getapiname(self):
return 'ecs.aliyuncs.com.StartInstance.2013-01-10'

View File

@ -1,12 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20130110StopInstanceRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.ForceStop = None
self.InstanceId = None
def getapiname(self):
return 'ecs.aliyuncs.com.StopInstance.2013-01-10'

View File

@ -1,13 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20140526AllocateEipAddressRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.Bandwidth = None
self.InternetChargeType = None
self.RegionId = None
def getapiname(self):
return 'ecs.aliyuncs.com.AllocateEipAddress.2014-05-26'

View File

@ -1,13 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20140526AllocatePublicIpAddressRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.InstanceId = None
self.IpAddress = None
self.VlanId = None
def getapiname(self):
return 'ecs.aliyuncs.com.AllocatePublicIpAddress.2014-05-26'

View File

@ -1,12 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20140526AssociateEipAddressRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.AllocationId = None
self.InstanceId = None
def getapiname(self):
return 'ecs.aliyuncs.com.AssociateEipAddress.2014-05-26'

View File

@ -1,14 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20140526AttachDiskRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.DeleteWithInstance = None
self.Device = None
self.DiskId = None
self.InstanceId = None
def getapiname(self):
return 'ecs.aliyuncs.com.AttachDisk.2014-05-26'

View File

@ -1,19 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20140526AuthorizeSecurityGroupRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.IpProtocol = None
self.NicType = None
self.Policy = None
self.PortRange = None
self.RegionId = None
self.SecurityGroupId = None
self.SourceCidrIp = None
self.SourceGroupId = None
self.SourceGroupOwnerAccount = None
def getapiname(self):
return 'ecs.aliyuncs.com.AuthorizeSecurityGroup.2014-05-26'

View File

@ -1,12 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20140526CancelCopyImageRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.ImageId = None
self.RegionId = None
def getapiname(self):
return 'ecs.aliyuncs.com.CancelCopyImage.2014-05-26'

View File

@ -1,15 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20140526CopyImageRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.DestinationDescription = None
self.DestinationImageName = None
self.DestinationRegionId = None
self.ImageId = None
self.RegionId = None
def getapiname(self):
return 'ecs.aliyuncs.com.CopyImage.2014-05-26'

View File

@ -1,18 +0,0 @@
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Ecs20140526CreateDiskRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.ClientToken = None
self.Description = None
self.DiskCategory = None
self.DiskName = None
self.RegionId = None
self.Size = None
self.SnapshotId = None
self.ZoneId = None
def getapiname(self):
return 'ecs.aliyuncs.com.CreateDisk.2014-05-26'

Some files were not shown because too many files have changed in this diff.