feat: v1.0.0

Author: LinskRuis.32G
Date: 2021-07-22 10:53:38 +08:00
Parent: 612a9abee1
Commit: 74eccd2991
414 changed files with 18251 additions and 24813 deletions

.gitignore (vendored, 151 changed lines)

@@ -1,135 +1,24 @@
# https://github.com/github/gitignore/blob/master/Java.gitignore
# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib
*.dat
*.class
/ruis.go
# Mobile Tools for Java (J2ME)
.mtj.tmp/
# Package Files #
#*.jar
*.war
*.ear
*.log
*.log.*
# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
hs_err_pid*
/.project
# https://github.com/github/gitignore/blob/master/Maven.gitignore
target/
pom.xml.tag
pom.xml.releaseBackup
pom.xml.versionsBackup
pom.xml.next
release.properties
dependency-reduced-pom.xml
buildNumber.properties
.mvn/timing.properties
# https://github.com/github/gitignore/blob/master/Global/Eclipse.gitignore
.metadata
tmp/
*.tmp
# Test binary, built with `go test -c`
/.idea
/.github
/dbm.bat
/dbm.sh
/model.yml
/install.html
*.test
*.bak
*.swp
*~.nib
local.properties
.settings/
.loadpath
.recommenders
# Eclipse Core
.project
# External tool builders
.externalToolBuilders/
# Locally stored "Eclipse launch configurations"
*.launch
# PyDev specific (Python IDE for Eclipse)
*.pydevproject
# CDT-specific (C/C++ Development Tooling)
.cproject
# JDT-specific (Eclipse Java Development Tools)
.classpath
# Java annotation processor (APT)
.factorypath
# PDT-specific (PHP Development Tools)
.buildpath
# sbteclipse plugin
.target
# Tern plugin
.tern-project
# TeXlipse plugin
.texlipse
# STS (Spring Tool Suite)
.springBeans
# Code Recommenders
.recommenders/
# https://github.com/github/gitignore/blob/master/Global/JetBrains.gitignore
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
## File-based project format:
*.iws
## Plugin-specific files:
# IntelliJ
out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
# zeroturnaround
rebel.xml
# log
log/
logs/
~$*.*
# IntelliJ IDEA
*.iml
.idea/
.gradle/
cmake-build-*/
# Visual Studio
.vs/
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
bin/
obj/
CMakeFiles/
resource/*.o
# Dependency directories (remove the comment below to include it)
# vendor/
__debug_bin

Deleted file

@@ -1,22 +0,0 @@
## Thanks to lockp111 for providing this Dockerfile
FROM golang:1.15.2-alpine AS builder
RUN apk add git gcc libc-dev && git clone https://github.com/mgr9525/gokins.git /build
WORKDIR /build
RUN GOOS=linux GOARCH=amd64 go build -o bin/gokins main.go
FROM alpine:latest AS final
RUN apk update \
&& apk upgrade \
&& apk --no-cache add openssl \
&& apk --no-cache add ca-certificates \
&& rm -rf /var/cache/apk \
&& mkdir -p /app
COPY --from=builder /build/bin/gokins /app
WORKDIR /app
ENTRYPOINT ["/app/gokins"]

LICENSE (215 changed lines)

@@ -1,202 +1,21 @@
The previous license text, the Apache License, Version 2.0 (January 2004, http://www.apache.org/licenses/, the standard terms and conditions sections 1 through 9 plus the appendix on how to apply the license), is removed and replaced by the MIT License:

MIT License

Copyright (c) 2021 gokins-main

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

README.md (296 changed lines)

@@ -1,257 +1,121 @@
<p align="center"><img src="static/logo.jpg" width="50%" height="50%"></p>
# Gokins: *More Power*

Empowering builds, made more powerful.

[![License](https://img.shields.io/badge/license-Apache%202-4EB1BA.svg)](https://www.apache.org/licenses/LICENSE-2.0.html)

-------

## What does it do

Gokins is a lightweight continuous integration and continuous delivery tool written in Go and Vue.

![](https://static01.imgkr.com/temp/5ca8a54f7d6544b6a2c740d5f559e5c4.jpg)

* **Continuous integration and continuous delivery**
  As an extensible automation server, Gokins can be used as a simple CI server or turned into the continuous delivery hub for any project.
* **Easy installation**
  Gokins is a self-contained Go program that runs out of the box on Windows, Mac OS X, and other Unix-like operating systems.
* **Simple configuration**
  Gokins can be set up and configured through its web interface with almost no difficulty.
* **Secure**
  Gokins never collects any user or server information; it is a standalone, secure service.
## Gokins Website

**URL: http://gokins.cn**

Version 1.0 of Gokins is currently being rebuilt; it will offer more features and be cleaner than the current release. You can follow the latest Gokins news on the website.
## Demo

**Demo address: http://demo.gokins.cn:8030** (also reachable at http://gokins.cn:8030)

```
username: guest
password: 123456
```

1. This demo is for demonstration purposes only (login password: 123456).
2. A Gokins scheduled task __recompiles__ this demo every day in the early morning.
3. If you cannot get in, someone else has broken it; please wait for the rebuild (come back early the next day).
4. The demo provides `git`, `gcc`, `golang`, `java8`, and `maven` environments.
## Quick Start

It is super easy to get started with your first project.

#### Step 1: Prepare the environment

- MySQL
- Docker (optional)

#### Step 2: Download

- Linux: http://bin.gokins.cn/gokins-linux-amd64
- Mac: http://bin.gokins.cn/gokins-darwin-amd64
- [Latest stable release](https://github.com/mgr9525/gokins/releases)

> We recommend installing Gokins with Docker or by downloading a release directly.

#### Step 3: Start the service

```
./gokins
```

#### Step 4: Install Gokins

Visit `http://localhost:8030` to open the Gokins installation page.

![](https://static01.imgkr.com/temp/e484d9747dec43108325c22283abe39f.png)

Fill in the information as prompted on the page.

Default administrator account:

`username: gokins`
`password: 123456`
#### Step 5: Create a pipeline

- Go to the pipeline page

![](https://static01.imgkr.com/temp/ce383350056d4a63872b868c8f169c39.png)

- Click "New pipeline"

![](https://static01.imgkr.com/temp/a3c2a870c9d94956bda2a685cc447077.png)

- Fill in the basic pipeline information
  - GitHub address: https://github.com/mgr9525/gokins
  - Gitee address: https://gitee.com/gokins/gokins
- Pipeline configuration

```
version: 1.0
vars:
stages:
  - stage:
    displayName: build
    name: build
    steps:
      - step: shell@sh
        displayName: test-build
        name: build
        env:
        commands:
          - echo Hello World
```

For more on the pipeline YAML configuration, see the [YML documentation](http://gokins.cn/%E5%B7%A5%E4%BD%9C%E6%B5%81%E8%AF%AD%E6%B3%95/).

- Run the pipeline

![](https://static01.imgkr.com/temp/f002a22738644c8dbd40f0860c2bbb9e.png)

`Here you can enter a repository branch or commit SHA; if left empty, the default branch is used.`

- View the run result

### Using Gokins

#### Download and run

You can look for a release for your platform, or run the following commands directly on the server:

```
# fetch the executable
wget -c https://github.com/mgr9525/gokins/releases/download/v0.1.2/gokins-linux-amd64
# make it executable
chmod +x gokins-linux-amd64
# run gokins
./gokins-linux-amd64
# show the help
./gokins-linux-amd64 --help
```

- Once it is running, visit port `8030`.

#### Initial configuration

- Set the root account password

![](https://img-blog.csdnimg.cn/2020101018222471.png?x-oss-process=image/watermark,type_ZmFuZ3poZW5naGVpdGk,shadow_10,text_aHR0cHM6Ly9ibG9nLmNzZG4ubmV0L21ncjk1MjU=,size_16,color_FFFFFF,t_70#pic_center)

- After logging in, you can see the main interface

![](https://img-blog.csdnimg.cn/20201010183151544.png?x-oss-process=image/watermark,type_ZmFuZ3poZW5naGVpdGk,shadow_10,text_aHR0cHM6Ly9ibG9nLmNzZG4ubmV0L21ncjk1MjU=,size_16,color_FFFFFF,t_70#pic_center)
#### Using a pipeline

1. Update the git code

Set up the code directory:

```
cd ~
mkdir programs
cd programs
git clone http://用户名:密码@git.xxx.cn/IPFS/IPFS-Slave.git
cd IPFS-Slave/
pwd
```

Clone with a username and password so the pipeline does not need login credentials when it updates the code. Copy this directory path; you will need it when creating the pipeline.

2. Build the pipeline

![](https://img-blog.csdnimg.cn/20201010185507308.png?x-oss-process=image/watermark,type_ZmFuZ3poZW5naGVpdGk,shadow_10,text_aHR0cHM6Ly9ibG9nLmNzZG4ubmV0L21ncjk1MjU=,size_16,color_FFFFFF,t_70#pic_center)

3. After creating it, go to the plugin list and create plugins

![](https://img-blog.csdnimg.cn/20201010190422932.png?x-oss-process=image/watermark,type_ZmFuZ3poZW5naGVpdGk,shadow_10,text_aHR0cHM6Ly9ibG9nLmNzZG4ubmV0L21ncjk1MjU=,size_16,color_FFFFFF,t_70#pic_center)

4. First create a plugin that updates git

![](https://img-blog.csdnimg.cn/20201010185741861.png?x-oss-process=image/watermark,type_ZmFuZ3poZW5naGVpdGk,shadow_10,text_aHR0cHM6Ly9ibG9nLmNzZG4ubmV0L21ncjk1MjU=,size_16,color_FFFFFF,t_70#pic_center)

5. Then create a build plugin

![](https://img-blog.csdnimg.cn/20201010185940669.png?x-oss-process=image/watermark,type_ZmFuZ3poZW5naGVpdGk,shadow_10,text_aHR0cHM6Ly9ibG9nLmNzZG4ubmV0L21ncjk1MjU=,size_16,color_FFFFFF,t_70#pic_center)

You can now see the two plugins:

![](https://img-blog.csdnimg.cn/20201010190009652.png?x-oss-process=image/watermark,type_ZmFuZ3poZW5naGVpdGk,shadow_10,text_aHR0cHM6Ly9ibG9nLmNzZG4ubmV0L21ncjk1MjU=,size_16,color_FFFFFF,t_70#pic_center)

6. Go back and run the pipeline

![](https://img-blog.csdnimg.cn/20201010190548125.png?x-oss-process=image/watermark,type_ZmFuZ3poZW5naGVpdGk,shadow_10,text_aHR0cHM6Ly9ibG9nLmNzZG4ubmV0L21ncjk1MjU=,size_16,color_FFFFFF,t_70#pic_center)
![](https://img-blog.csdnimg.cn/20201010190653913.png?x-oss-process=image/watermark,type_ZmFuZ3poZW5naGVpdGk,shadow_10,text_aHR0cHM6Ly9ibG9nLmNzZG4ubmV0L21ncjk1MjU=,size_16,color_FFFFFF,t_70#pic_center)
![](https://img-blog.csdnimg.cn/20201010190721865.png?x-oss-process=image/watermark,type_ZmFuZ3poZW5naGVpdGk,shadow_10,text_aHR0cHM6Ly9ibG9nLmNzZG4ubmV0L21ncjk1MjU=,size_16,color_FFFFFF,t_70#pic_center)

- That completes a simple CI pipeline project.
## Developing Gokins

Gokins is still at a stage where it needs to keep improving. If you are interested in joining us, you can submit a PR or an issue on GitHub.

### Server environment

Server: Ubuntu 18, 64-bit Linux

The project requires git, golang, and node.js.

### Installing the project environment

1. git
   `requires git 2.17.1 or another 2.x version`
2. node.js
   `requires node 12.19.0`
3. golang
   `requires golang 1.15.2`
## Advantages and Future Direction

- Small footprint: both the executable and its memory usage are more than an order of magnitude smaller than Jenkins; Gokins currently runs in roughly 20 KB of memory.
- Simple: a friendly UI makes it easy to get started.
- Customizable plugin configuration lets you handle many different situations.
- Secure: it never collects any user or server information.

In the future, a simple, easy-to-use CI/CD tool will greatly improve enterprise productivity. Gokins can serve not only as a code-delivery tool but also as a core component of automated operations, supporting large projects.
## More

### Help

```
./gokins -h
```

### Upgrade

Upgrade the database and add the trigger feature (for users who have used Gokins before):

```
./gokins -up
```
## Contact
* Join us on QQ (group: 975316343).
<p align="center"><img src="static/qq.jpg" width="50%" height="50%"></p>
## Download
- [Github Release](https://github.com/mgr9525/gokins/releases)
## Who is using
<a href="http://1ydts.com" align="center"><img src="static/whouse/biaotou.jpg" width="50%" height="50%"></a>
## Changelog

### Gokins V0.2.0 (released 2020-10-24)

- New features
  1. Added GitLab and Gitee webhook triggers for pipelines
  2. Added follow-up work that runs after a pipeline finishes
- Bug fixes
  1. Fixed some known issues

***

### Gokins V0.1.2 (released 2020-10-20)

- New features
  1. Added triggers (including a pipeline-log trigger)
  2. Implemented the trigger manager task
  3. Added an upgrade command `./gokins -up` to upgrade the database and add the trigger feature (for users who have used Gokins before)
- Bug fixes
  1. Fixed a front-end infinite-request hang
  2. Reworked a serious goroutine context bug
- Improvements
  1. Logs are now written to log files instead of the database, reducing database size
  2. Optimized the PATH environment variable and added variable lookup

![](https://static01.imgkr.com/temp/681c8ea0a7dc45bcb9fe14234c5761be.png)

Deleted file

@@ -1,28 +0,0 @@
package main
import (
"encoding/base64"
"fmt"
"io/ioutil"
)
func main() {
bdsqls()
bdzip()
}
func bdsqls() {
bts, _ := ioutil.ReadFile("doc/sys.sql")
ioutil.WriteFile("comm/dbfl.go",
[]byte(fmt.Sprintf("package comm\n\nconst sqls = `\n%s\n`", string(bts))),
0644)
println("sql insert go ok!!!")
}
func bdzip() {
bts, _ := ioutil.ReadFile("uis/vue-admin/dist/dist.zip")
cont := base64.StdEncoding.EncodeToString(bts)
ioutil.WriteFile("comm/vuefl.go",
[]byte(fmt.Sprintf("package comm\n\nconst StaticPkg = \"%s\"", cont)),
0644)
println("ui insert go ok!!!")
}

bean/condition.go (new file, 107 lines)

@@ -0,0 +1,107 @@
package bean
import (
"regexp"
"strings"
)
func skipBranch(c *Condition, branch string) bool {
return !c.Match(branch)
}
func skipCommitMessages(c *Condition, branch string) bool {
return !c.Match(branch)
}
func skipCommitNotes(c *Condition, branch string) bool {
return !c.Match(branch)
}
func (c *Condition) Match(v string) bool {
if c == nil {
return false
}
if c.Include != nil && c.Exclude != nil {
return c.Includes(v) && !c.Excludes(v)
}
if c.Include != nil && c.Includes(v) {
return true
}
if c.Exclude != nil && !c.Excludes(v) {
return true
}
return false
}
func (c *Condition) Excludes(v string) bool {
for _, in := range c.Exclude {
if in == "" {
continue
}
if in == v {
return true
}
if isMatch(v, in) {
return true
}
reg, err := regexp.Compile(in)
if err != nil {
return false
}
match := reg.Match([]byte(strings.Replace(v, "\n", "", -1)))
if match {
return true
}
}
return false
}
func (c *Condition) Includes(v string) bool {
for _, in := range c.Include {
if in == "" {
continue
}
if in == v {
return true
}
if isMatch(v, in) {
return true
}
reg, err := regexp.Compile(in)
if err != nil {
return false
}
match := reg.Match([]byte(strings.Replace(v, "\n", "", -1)))
if match {
return true
}
}
return false
}
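// isMatch reports whether s matches the wildcard pattern p, where '*' matches
// any (possibly empty) sequence of characters. It fills a dynamic-programming
// table dp[i][j] meaning "the first i bytes of s match the first j bytes of p".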
func isMatch(s string, p string) bool {
m, n := len(s), len(p)
dp := make([][]bool, m+1)
for i := 0; i <= m; i++ {
dp[i] = make([]bool, n+1)
}
dp[0][0] = true
for i := 1; i <= n; i++ {
if p[i-1] == '*' {
dp[0][i] = true
} else {
break
}
}
for i := 1; i <= m; i++ {
for j := 1; j <= n; j++ {
if p[j-1] == '*' {
dp[i][j] = dp[i][j-1] || dp[i-1][j]
} else if s[i-1] == p[j-1] {
dp[i][j] = dp[i-1][j-1]
}
}
}
return dp[m][n]
}
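A minimal, self-contained sketch (not code from this commit) of how the include/exclude matching above behaves; the import path github.com/gokins-main/gokins/bean is an assumption inferred from the module imports in cmd/cmd.go later in this commit.

```go
package main

import (
	"fmt"

	"github.com/gokins-main/gokins/bean" // assumed module path, mirroring cmd/cmd.go's imports
)

func main() {
	// Trigger only for master or release/* branches, but never for release/beta.
	cond := &bean.Condition{
		Include: []string{"master", "release/*"},
		Exclude: []string{"release/beta"},
	}
	for _, br := range []string{"master", "release/1.0", "release/beta", "dev"} {
		// Match passes when the value satisfies the include list (exact match,
		// '*' wildcard via isMatch, or regexp) and is not caught by the exclude list.
		fmt.Printf("%-12s matched=%v\n", br, cond.Match(br))
	}
}
```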

bean/db.go (new file, 15 lines)

@@ -0,0 +1,15 @@
package bean
type Page struct {
Page int64 `json:"page"`
Size int64 `json:"size"`
Total int64 `json:"total"`
Pages int64 `json:"pages"`
Data interface{} `json:"data"`
}
type PageGen struct {
SQL string
Args []interface{}
CountCols string
FindCols string
}

bean/http.go (new file, 18 lines)

@@ -0,0 +1,18 @@
package bean
type IdsRes struct {
Id string `json:"id"`
Aid int64 `json:"aid"`
}
type LoginReq struct {
Name string `json:"name"`
Pass string `json:"pass"`
}
type LoginRes struct {
Token string `json:"token"`
Id string `json:"id"`
Name string `json:"name"`
Nick string `json:"nick"`
Avatar string `json:"avatar"`
LastLoginTime string `json:"lastLoginTime"`
}

bean/models.go (new file, 13 lines)

@@ -0,0 +1,13 @@
package bean
type PipelineShow struct {
Id string `json:"id"`
Uid string `json:"uid"`
Name string `json:"name"`
DisplayName string `json:"displayName"`
PipelineType string `json:"pipelineType"`
YmlContent string `json:"ymlContent"`
Url string `json:"url"`
Username string `json:"username"`
AccessToken string `json:"accessToken"`
}

bean/pipeline.go (new file, 26 lines)

@@ -0,0 +1,26 @@
package bean
type NewPipeline struct {
Name string `json:"name"`
DisplayName string `json:"displayName"`
Content string `json:"content"`
OrgId string `json:"orgId"`
AccessToken string `json:"accessToken"`
Url string `json:"url"`
Username string `json:"username"`
Vars []*NewPipelineVar `json:"vars"`
}
type NewPipelineVar struct {
Name string `json:"name"`
Value string `json:"value"`
Remarks string `json:"remarks"`
Public bool `json:"public"`
}
func (p *NewPipeline) Check() bool {
if p.Name == "" || p.Content == "" {
return false
}
return true
}

bean/pipelinevar.go (new file, 10 lines)

@@ -0,0 +1,10 @@
package bean
type PipelineVar struct {
Aid int64 `json:"aid"`
PipelineId string `json:"pipelineId"`
Name string ` json:"name"`
Value string ` json:"value"`
Remarks string ` json:"remarks"`
Public bool ` json:"public"`
}

bean/runtime.go (new file, 18 lines)

@@ -0,0 +1,18 @@
package bean
import "time"
type LogOutJson struct {
Id string `json:"id"`
Content string `json:"content"`
Times time.Time `json:"times"`
Errs bool `json:"errs"`
}
type LogOutJsonRes struct {
Id string `json:"id"`
Content string `json:"content"`
Times time.Time `json:"times"`
Errs bool `json:"errs"`
Offset int64 `json:"offset"`
}

bean/thirdbean/gitea.go (new file, 121 lines)

@@ -0,0 +1,121 @@
package thirdbean
import "time"
type ResultGiteaRepo struct {
Id int `json:"id"`
Owner struct {
Id int `json:"id"`
Login string `json:"login"`
FullName string `json:"full_name"`
Email string `json:"email"`
AvatarUrl string `json:"avatar_url"`
Language string `json:"language"`
IsAdmin bool `json:"is_admin"`
LastLogin time.Time `json:"last_login"`
Created time.Time `json:"created"`
Restricted bool `json:"restricted"`
Active bool `json:"active"`
ProhibitLogin bool `json:"prohibit_login"`
Location string `json:"location"`
Website string `json:"website"`
Description string `json:"description"`
Username string `json:"username"`
} `json:"owner"`
Name string `json:"name"`
FullName string `json:"full_name"`
Description string `json:"description"`
Empty bool `json:"empty"`
Private bool `json:"private"`
Fork bool `json:"fork"`
Template bool `json:"template"`
Parent interface{} `json:"parent"`
Mirror bool `json:"mirror"`
Size int `json:"size"`
HtmlUrl string `json:"html_url"`
SshUrl string `json:"ssh_url"`
CloneUrl string `json:"clone_url"`
OriginalUrl string `json:"original_url"`
Website string `json:"website"`
StarsCount int `json:"stars_count"`
ForksCount int `json:"forks_count"`
WatchersCount int `json:"watchers_count"`
OpenIssuesCount int `json:"open_issues_count"`
OpenPrCounter int `json:"open_pr_counter"`
ReleaseCounter int `json:"release_counter"`
DefaultBranch string `json:"default_branch"`
Archived bool `json:"archived"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
Permissions struct {
Admin bool `json:"admin"`
Push bool `json:"push"`
Pull bool `json:"pull"`
} `json:"permissions"`
HasIssues bool `json:"has_issues"`
InternalTracker struct {
EnableTimeTracker bool `json:"enable_time_tracker"`
AllowOnlyContributorsToTrackTime bool `json:"allow_only_contributors_to_track_time"`
EnableIssueDependencies bool `json:"enable_issue_dependencies"`
} `json:"internal_tracker"`
HasWiki bool `json:"has_wiki"`
HasPullRequests bool `json:"has_pull_requests"`
HasProjects bool `json:"has_projects"`
IgnoreWhitespaceConflicts bool `json:"ignore_whitespace_conflicts"`
AllowMergeCommits bool `json:"allow_merge_commits"`
AllowRebase bool `json:"allow_rebase"`
AllowRebaseExplicit bool `json:"allow_rebase_explicit"`
AllowSquashMerge bool `json:"allow_squash_merge"`
DefaultMergeStyle string `json:"default_merge_style"`
AvatarUrl string `json:"avatar_url"`
Internal bool `json:"internal"`
MirrorInterval string `json:"mirror_interval"`
}
type ResultGiteaRepoBranch struct {
Name string `json:"name"`
Commit struct {
Id string `json:"id"`
Message string `json:"message"`
Url string `json:"url"`
Author struct {
Name string `json:"name"`
Email string `json:"email"`
Username string `json:"username"`
} `json:"author"`
Committer struct {
Name string `json:"name"`
Email string `json:"email"`
Username string `json:"username"`
} `json:"committer"`
Verification struct {
Verified bool `json:"verified"`
Reason string `json:"reason"`
Signature string `json:"signature"`
Signer interface{} `json:"signer"`
Payload string `json:"payload"`
} `json:"verification"`
Timestamp time.Time `json:"timestamp"`
Added interface{} `json:"added"`
Removed interface{} `json:"removed"`
Modified interface{} `json:"modified"`
} `json:"commit"`
Protected bool `json:"protected"`
RequiredApprovals int `json:"required_approvals"`
EnableStatusCheck bool `json:"enable_status_check"`
StatusCheckContexts []interface{} `json:"status_check_contexts"`
UserCanPush bool `json:"user_can_push"`
UserCanMerge bool `json:"user_can_merge"`
EffectiveBranchProtectionName string `json:"effective_branch_protection_name"`
}
type ResultGetGiteaHook struct {
Id int `json:"id"`
Type string `json:"type"`
Config struct {
ContentType string `json:"content_type"`
Url string `json:"url"`
} `json:"config"`
Events []string `json:"events"`
Active bool `json:"active"`
UpdatedAt time.Time `json:"updated_at"`
CreatedAt time.Time `json:"created_at"`
}

bean/thirdbean/gitee.go (new file, 171 lines)

@@ -0,0 +1,171 @@
package thirdbean
import "time"
type ResultGiteeCreateHooks struct {
Id int `json:"id"`
Url string `json:"url"`
CreatedAt time.Time `json:"created_at"`
Password string `json:"password"`
ProjectId int `json:"project_id"`
Result string `json:"result"`
ResultCode interface{} `json:"result_code"`
PushEvents bool `json:"push_events"`
TagPushEvents bool `json:"tag_push_events"`
IssuesEvents bool `json:"issues_events"`
NoteEvents bool `json:"note_events"`
MergeRequestsEvents bool `json:"merge_requests_events"`
}
type ResultGiteeRepo struct {
Id int64 `json:"id"`
FullName string `json:"full_name"`
HumanName string `json:"human_name"`
Url string `json:"url"`
Namespace struct {
Id int `json:"id"`
Type string `json:"type"`
Name string `json:"name"`
Path string `json:"path"`
HtmlUrl string `json:"html_url"`
} `json:"namespace"`
Path string `json:"path"`
Name string `json:"name"`
Owner struct {
Id int `json:"id"`
Login string `json:"login"`
Name string `json:"name"`
AvatarUrl string `json:"avatar_url"`
Url string `json:"url"`
HtmlUrl string `json:"html_url"`
FollowersUrl string `json:"followers_url"`
FollowingUrl string `json:"following_url"`
GistsUrl string `json:"gists_url"`
StarredUrl string `json:"starred_url"`
SubscriptionsUrl string `json:"subscriptions_url"`
OrganizationsUrl string `json:"organizations_url"`
ReposUrl string `json:"repos_url"`
EventsUrl string `json:"events_url"`
ReceivedEventsUrl string `json:"received_events_url"`
Type string `json:"type"`
} `json:"owner"`
Description string `json:"description"`
Private bool `json:"private"`
Public bool `json:"public"`
Internal bool `json:"internal"`
Fork bool `json:"fork"`
HtmlUrl string `json:"html_url"`
SshUrl string `json:"ssh_url"`
ForksUrl string `json:"forks_url"`
KeysUrl string `json:"keys_url"`
CollaboratorsUrl string `json:"collaborators_url"`
HooksUrl string `json:"hooks_url"`
BranchesUrl string `json:"branches_url"`
TagsUrl string `json:"tags_url"`
BlobsUrl string `json:"blobs_url"`
StargazersUrl string `json:"stargazers_url"`
ContributorsUrl string `json:"contributors_url"`
CommitsUrl string `json:"commits_url"`
CommentsUrl string `json:"comments_url"`
IssueCommentUrl string `json:"issue_comment_url"`
IssuesUrl string `json:"issues_url"`
PullsUrl string `json:"pulls_url"`
MilestonesUrl string `json:"milestones_url"`
NotificationsUrl string `json:"notifications_url"`
LabelsUrl string `json:"labels_url"`
ReleasesUrl string `json:"releases_url"`
Recommend bool `json:"recommend"`
Homepage interface{} `json:"homepage"`
Language string `json:"language"`
ForksCount int `json:"forks_count"`
StargazersCount int `json:"stargazers_count"`
WatchersCount int `json:"watchers_count"`
DefaultBranch string `json:"default_branch"`
OpenIssuesCount int `json:"open_issues_count"`
HasIssues bool `json:"has_issues"`
HasWiki bool `json:"has_wiki"`
IssueComment bool `json:"issue_comment"`
CanComment bool `json:"can_comment"`
PullRequestsEnabled bool `json:"pull_requests_enabled"`
HasPage bool `json:"has_page"`
License string `json:"license"`
Outsourced bool `json:"outsourced"`
ProjectCreator string `json:"project_creator"`
Members []string `json:"members"`
PushedAt time.Time `json:"pushed_at"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
Parent interface{} `json:"parent"`
Paas interface{} `json:"paas"`
Stared bool `json:"stared"`
Watched bool `json:"watched"`
Permission struct {
Pull bool `json:"pull"`
Push bool `json:"push"`
Admin bool `json:"admin"`
} `json:"permission"`
Relation string `json:"relation"`
AssigneesNumber int `json:"assignees_number"`
TestersNumber int `json:"testers_number"`
Assignees []struct {
Id int `json:"id"`
Login string `json:"login"`
Name string `json:"name"`
AvatarUrl string `json:"avatar_url"`
Url string `json:"url"`
HtmlUrl string `json:"html_url"`
FollowersUrl string `json:"followers_url"`
FollowingUrl string `json:"following_url"`
GistsUrl string `json:"gists_url"`
StarredUrl string `json:"starred_url"`
SubscriptionsUrl string `json:"subscriptions_url"`
OrganizationsUrl string `json:"organizations_url"`
ReposUrl string `json:"repos_url"`
EventsUrl string `json:"events_url"`
ReceivedEventsUrl string `json:"received_events_url"`
Type string `json:"type"`
} `json:"assignees"`
Testers []struct {
Id int `json:"id"`
Login string `json:"login"`
Name string `json:"name"`
AvatarUrl string `json:"avatar_url"`
Url string `json:"url"`
HtmlUrl string `json:"html_url"`
FollowersUrl string `json:"followers_url"`
FollowingUrl string `json:"following_url"`
GistsUrl string `json:"gists_url"`
StarredUrl string `json:"starred_url"`
SubscriptionsUrl string `json:"subscriptions_url"`
OrganizationsUrl string `json:"organizations_url"`
ReposUrl string `json:"repos_url"`
EventsUrl string `json:"events_url"`
ReceivedEventsUrl string `json:"received_events_url"`
Type string `json:"type"`
} `json:"testers"`
}
type ResultGiteeRepoBranch struct {
Name string `json:"name"`
Commit struct {
Sha string `json:"sha"`
Url string `json:"url"`
} `json:"commit"`
Protected bool `json:"protected"`
ProtectionUrl string `json:"protection_url"`
}
type ResultGetGiteeHook struct {
Id int `json:"id"`
Url string `json:"url"`
CreatedAt time.Time `json:"created_at"`
Password string `json:"password"`
ProjectId int `json:"project_id"`
Result string `json:"result"`
ResultCode int `json:"result_code"`
PushEvents bool `json:"push_events"`
TagPushEvents bool `json:"tag_push_events"`
IssuesEvents bool `json:"issues_events"`
NoteEvents bool `json:"note_events"`
MergeRequestsEvents bool `json:"merge_requests_events"`
}

New file

@@ -0,0 +1,171 @@
package thirdbean
import "time"
type ResultGiteePremiumCreateHooks struct {
Id int `json:"id"`
Url string `json:"url"`
CreatedAt time.Time `json:"created_at"`
Password string `json:"password"`
ProjectId int `json:"project_id"`
Result string `json:"result"`
ResultCode interface{} `json:"result_code"`
PushEvents bool `json:"push_events"`
TagPushEvents bool `json:"tag_push_events"`
IssuesEvents bool `json:"issues_events"`
NoteEvents bool `json:"note_events"`
MergeRequestsEvents bool `json:"merge_requests_events"`
}
type ResultGiteePremiumRepo struct {
Id int64 `json:"id"`
FullName string `json:"full_name"`
HumanName string `json:"human_name"`
Url string `json:"url"`
Namespace struct {
Id int `json:"id"`
Type string `json:"type"`
Name string `json:"name"`
Path string `json:"path"`
HtmlUrl string `json:"html_url"`
} `json:"namespace"`
Path string `json:"path"`
Name string `json:"name"`
Owner struct {
Id int `json:"id"`
Login string `json:"login"`
Name string `json:"name"`
AvatarUrl string `json:"avatar_url"`
Url string `json:"url"`
HtmlUrl string `json:"html_url"`
FollowersUrl string `json:"followers_url"`
FollowingUrl string `json:"following_url"`
GistsUrl string `json:"gists_url"`
StarredUrl string `json:"starred_url"`
SubscriptionsUrl string `json:"subscriptions_url"`
OrganizationsUrl string `json:"organizations_url"`
ReposUrl string `json:"repos_url"`
EventsUrl string `json:"events_url"`
ReceivedEventsUrl string `json:"received_events_url"`
Type string `json:"type"`
} `json:"owner"`
Description string `json:"description"`
Private bool `json:"private"`
Public bool `json:"public"`
Internal bool `json:"internal"`
Fork bool `json:"fork"`
HtmlUrl string `json:"html_url"`
SshUrl string `json:"ssh_url"`
ForksUrl string `json:"forks_url"`
KeysUrl string `json:"keys_url"`
CollaboratorsUrl string `json:"collaborators_url"`
HooksUrl string `json:"hooks_url"`
BranchesUrl string `json:"branches_url"`
TagsUrl string `json:"tags_url"`
BlobsUrl string `json:"blobs_url"`
StargazersUrl string `json:"stargazers_url"`
ContributorsUrl string `json:"contributors_url"`
CommitsUrl string `json:"commits_url"`
CommentsUrl string `json:"comments_url"`
IssueCommentUrl string `json:"issue_comment_url"`
IssuesUrl string `json:"issues_url"`
PullsUrl string `json:"pulls_url"`
MilestonesUrl string `json:"milestones_url"`
NotificationsUrl string `json:"notifications_url"`
LabelsUrl string `json:"labels_url"`
ReleasesUrl string `json:"releases_url"`
Recommend bool `json:"recommend"`
Homepage interface{} `json:"homepage"`
Language string `json:"language"`
ForksCount int `json:"forks_count"`
StargazersCount int `json:"stargazers_count"`
WatchersCount int `json:"watchers_count"`
DefaultBranch string `json:"default_branch"`
OpenIssuesCount int `json:"open_issues_count"`
HasIssues bool `json:"has_issues"`
HasWiki bool `json:"has_wiki"`
IssueComment bool `json:"issue_comment"`
CanComment bool `json:"can_comment"`
PullRequestsEnabled bool `json:"pull_requests_enabled"`
HasPage bool `json:"has_page"`
License string `json:"license"`
Outsourced bool `json:"outsourced"`
ProjectCreator string `json:"project_creator"`
Members []string `json:"members"`
PushedAt time.Time `json:"pushed_at"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
Parent interface{} `json:"parent"`
Paas interface{} `json:"paas"`
Stared bool `json:"stared"`
Watched bool `json:"watched"`
Permission struct {
Pull bool `json:"pull"`
Push bool `json:"push"`
Admin bool `json:"admin"`
} `json:"permission"`
Relation string `json:"relation"`
AssigneesNumber int `json:"assignees_number"`
TestersNumber int `json:"testers_number"`
Assignees []struct {
Id int `json:"id"`
Login string `json:"login"`
Name string `json:"name"`
AvatarUrl string `json:"avatar_url"`
Url string `json:"url"`
HtmlUrl string `json:"html_url"`
FollowersUrl string `json:"followers_url"`
FollowingUrl string `json:"following_url"`
GistsUrl string `json:"gists_url"`
StarredUrl string `json:"starred_url"`
SubscriptionsUrl string `json:"subscriptions_url"`
OrganizationsUrl string `json:"organizations_url"`
ReposUrl string `json:"repos_url"`
EventsUrl string `json:"events_url"`
ReceivedEventsUrl string `json:"received_events_url"`
Type string `json:"type"`
} `json:"assignees"`
Testers []struct {
Id int `json:"id"`
Login string `json:"login"`
Name string `json:"name"`
AvatarUrl string `json:"avatar_url"`
Url string `json:"url"`
HtmlUrl string `json:"html_url"`
FollowersUrl string `json:"followers_url"`
FollowingUrl string `json:"following_url"`
GistsUrl string `json:"gists_url"`
StarredUrl string `json:"starred_url"`
SubscriptionsUrl string `json:"subscriptions_url"`
OrganizationsUrl string `json:"organizations_url"`
ReposUrl string `json:"repos_url"`
EventsUrl string `json:"events_url"`
ReceivedEventsUrl string `json:"received_events_url"`
Type string `json:"type"`
} `json:"testers"`
}
type ResultGiteePremiumRepoBranch struct {
Name string `json:"name"`
Commit struct {
Sha string `json:"sha"`
Url string `json:"url"`
} `json:"commit"`
Protected bool `json:"protected"`
ProtectionUrl string `json:"protection_url"`
}
type ResultGetGiteePremiumHook struct {
Id int `json:"id"`
Url string `json:"url"`
CreatedAt time.Time `json:"created_at"`
Password string `json:"password"`
ProjectId int `json:"project_id"`
Result string `json:"result"`
ResultCode int `json:"result_code"`
PushEvents bool `json:"push_events"`
TagPushEvents bool `json:"tag_push_events"`
IssuesEvents bool `json:"issues_events"`
NoteEvents bool `json:"note_events"`
MergeRequestsEvents bool `json:"merge_requests_events"`
}

bean/thirdbean/github.go (new file, 148 lines)

@@ -0,0 +1,148 @@
package thirdbean
import "time"
type ResultGithubRepo struct {
Id int `json:"id"`
NodeId string `json:"node_id"`
Name string `json:"name"`
FullName string `json:"full_name"`
Private bool `json:"private"`
Owner struct {
Login string `json:"login"`
Id int `json:"id"`
NodeId string `json:"node_id"`
AvatarUrl string `json:"avatar_url"`
GravatarId string `json:"gravatar_id"`
Url string `json:"url"`
HtmlUrl string `json:"html_url"`
FollowersUrl string `json:"followers_url"`
FollowingUrl string `json:"following_url"`
GistsUrl string `json:"gists_url"`
StarredUrl string `json:"starred_url"`
SubscriptionsUrl string `json:"subscriptions_url"`
OrganizationsUrl string `json:"organizations_url"`
ReposUrl string `json:"repos_url"`
EventsUrl string `json:"events_url"`
ReceivedEventsUrl string `json:"received_events_url"`
Type string `json:"type"`
SiteAdmin bool `json:"site_admin"`
} `json:"owner"`
HtmlUrl string `json:"html_url"`
Description *string `json:"description"`
Fork bool `json:"fork"`
Url string `json:"url"`
ForksUrl string `json:"forks_url"`
KeysUrl string `json:"keys_url"`
CollaboratorsUrl string `json:"collaborators_url"`
TeamsUrl string `json:"teams_url"`
HooksUrl string `json:"hooks_url"`
IssueEventsUrl string `json:"issue_events_url"`
EventsUrl string `json:"events_url"`
AssigneesUrl string `json:"assignees_url"`
BranchesUrl string `json:"branches_url"`
TagsUrl string `json:"tags_url"`
BlobsUrl string `json:"blobs_url"`
GitTagsUrl string `json:"git_tags_url"`
GitRefsUrl string `json:"git_refs_url"`
TreesUrl string `json:"trees_url"`
StatusesUrl string `json:"statuses_url"`
LanguagesUrl string `json:"languages_url"`
StargazersUrl string `json:"stargazers_url"`
ContributorsUrl string `json:"contributors_url"`
SubscribersUrl string `json:"subscribers_url"`
SubscriptionUrl string `json:"subscription_url"`
CommitsUrl string `json:"commits_url"`
GitCommitsUrl string `json:"git_commits_url"`
CommentsUrl string `json:"comments_url"`
IssueCommentUrl string `json:"issue_comment_url"`
ContentsUrl string `json:"contents_url"`
CompareUrl string `json:"compare_url"`
MergesUrl string `json:"merges_url"`
ArchiveUrl string `json:"archive_url"`
DownloadsUrl string `json:"downloads_url"`
IssuesUrl string `json:"issues_url"`
PullsUrl string `json:"pulls_url"`
MilestonesUrl string `json:"milestones_url"`
NotificationsUrl string `json:"notifications_url"`
LabelsUrl string `json:"labels_url"`
ReleasesUrl string `json:"releases_url"`
DeploymentsUrl string `json:"deployments_url"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
PushedAt time.Time `json:"pushed_at"`
GitUrl string `json:"git_url"`
SshUrl string `json:"ssh_url"`
CloneUrl string `json:"clone_url"`
SvnUrl string `json:"svn_url"`
Homepage *string `json:"homepage"`
Size int `json:"size"`
StargazersCount int `json:"stargazers_count"`
WatchersCount int `json:"watchers_count"`
Language *string `json:"language"`
HasIssues bool `json:"has_issues"`
HasProjects bool `json:"has_projects"`
HasDownloads bool `json:"has_downloads"`
HasWiki bool `json:"has_wiki"`
HasPages bool `json:"has_pages"`
ForksCount int `json:"forks_count"`
MirrorUrl interface{} `json:"mirror_url"`
Archived bool `json:"archived"`
Disabled bool `json:"disabled"`
OpenIssuesCount int `json:"open_issues_count"`
License *struct {
Key string `json:"key"`
Name string `json:"name"`
SpdxId string `json:"spdx_id"`
Url *string `json:"url"`
NodeId string `json:"node_id"`
} `json:"license"`
Forks int `json:"forks"`
OpenIssues int `json:"open_issues"`
Watchers int `json:"watchers"`
DefaultBranch string `json:"default_branch"`
Permissions struct {
Admin bool `json:"admin"`
Push bool `json:"push"`
Pull bool `json:"pull"`
} `json:"permissions"`
}
type ResultGithubRepoBranch struct {
Name string `json:"name"`
Commit struct {
Sha string `json:"sha"`
Url string `json:"url"`
} `json:"commit"`
Protected bool `json:"protected"`
Protection struct {
RequiredStatusChecks struct {
EnforcementLevel string `json:"enforcement_level"`
Contexts []string `json:"contexts"`
} `json:"required_status_checks"`
} `json:"protection"`
ProtectionUrl string `json:"protection_url"`
}
type ResultGetGithubHook struct {
Type string `json:"type"`
Id int `json:"id"`
Name string `json:"name"`
Active bool `json:"active"`
Events []string `json:"events"`
Config struct {
ContentType string `json:"content_type"`
InsecureSsl string `json:"insecure_ssl"`
Url string `json:"url"`
} `json:"config"`
UpdatedAt time.Time `json:"updated_at"`
CreatedAt time.Time `json:"created_at"`
Url string `json:"url"`
TestUrl string `json:"test_url"`
PingUrl string `json:"ping_url"`
LastResponse struct {
Code interface{} `json:"code"`
Status string `json:"status"`
Message interface{} `json:"message"`
} `json:"last_response"`
}

bean/thirdbean/gitlab.go (new file, 168 lines)

@@ -0,0 +1,168 @@
package thirdbean
import "time"
type ResultGitlabRepo struct {
Id int `json:"id"`
Description string `json:"description"`
Name string `json:"name"`
NameWithNamespace string `json:"name_with_namespace"`
Path string `json:"path"`
PathWithNamespace string `json:"path_with_namespace"`
CreatedAt time.Time `json:"created_at"`
DefaultBranch string `json:"default_branch"`
TagList []interface{} `json:"tag_list"`
Topics []interface{} `json:"topics"`
SshUrlToRepo string `json:"ssh_url_to_repo"`
HttpUrlToRepo string `json:"http_url_to_repo"`
WebUrl string `json:"web_url"`
ReadmeUrl string `json:"readme_url"`
AvatarUrl interface{} `json:"avatar_url"`
ForksCount int `json:"forks_count"`
StarCount int `json:"star_count"`
LastActivityAt time.Time `json:"last_activity_at"`
Namespace struct {
Id int `json:"id"`
Name string `json:"name"`
Path string `json:"path"`
Kind string `json:"kind"`
FullPath string `json:"full_path"`
ParentId interface{} `json:"parent_id"`
AvatarUrl string `json:"avatar_url"`
WebUrl string `json:"web_url"`
} `json:"namespace"`
ContainerRegistryImagePrefix string `json:"container_registry_image_prefix"`
Links struct {
Self string `json:"self"`
Issues string `json:"issues"`
MergeRequests string `json:"merge_requests"`
RepoBranches string `json:"repo_branches"`
Labels string `json:"labels"`
Events string `json:"events"`
Members string `json:"members"`
} `json:"_links"`
PackagesEnabled bool `json:"packages_enabled"`
EmptyRepo bool `json:"empty_repo"`
Archived bool `json:"archived"`
Visibility string `json:"visibility"`
Owner struct {
Id int `json:"id"`
Name string `json:"name"`
Username string `json:"username"`
State string `json:"state"`
AvatarUrl string `json:"avatar_url"`
WebUrl string `json:"web_url"`
} `json:"owner"`
ResolveOutdatedDiffDiscussions bool `json:"resolve_outdated_diff_discussions"`
ContainerExpirationPolicy struct {
Cadence string `json:"cadence"`
Enabled bool `json:"enabled"`
KeepN int `json:"keep_n"`
OlderThan string `json:"older_than"`
NameRegex string `json:"name_regex"`
NameRegexKeep interface{} `json:"name_regex_keep"`
NextRunAt time.Time `json:"next_run_at"`
} `json:"container_expiration_policy"`
IssuesEnabled bool `json:"issues_enabled"`
MergeRequestsEnabled bool `json:"merge_requests_enabled"`
WikiEnabled bool `json:"wiki_enabled"`
JobsEnabled bool `json:"jobs_enabled"`
SnippetsEnabled bool `json:"snippets_enabled"`
ContainerRegistryEnabled bool `json:"container_registry_enabled"`
ServiceDeskEnabled bool `json:"service_desk_enabled"`
ServiceDeskAddress string `json:"service_desk_address"`
CanCreateMergeRequestIn bool `json:"can_create_merge_request_in"`
IssuesAccessLevel string `json:"issues_access_level"`
RepositoryAccessLevel string `json:"repository_access_level"`
MergeRequestsAccessLevel string `json:"merge_requests_access_level"`
ForkingAccessLevel string `json:"forking_access_level"`
WikiAccessLevel string `json:"wiki_access_level"`
BuildsAccessLevel string `json:"builds_access_level"`
SnippetsAccessLevel string `json:"snippets_access_level"`
PagesAccessLevel string `json:"pages_access_level"`
OperationsAccessLevel string `json:"operations_access_level"`
AnalyticsAccessLevel string `json:"analytics_access_level"`
EmailsDisabled interface{} `json:"emails_disabled"`
SharedRunnersEnabled bool `json:"shared_runners_enabled"`
LfsEnabled bool `json:"lfs_enabled"`
CreatorId int `json:"creator_id"`
ImportStatus string `json:"import_status"`
OpenIssuesCount int `json:"open_issues_count"`
CiDefaultGitDepth int `json:"ci_default_git_depth"`
CiForwardDeploymentEnabled bool `json:"ci_forward_deployment_enabled"`
PublicJobs bool `json:"public_jobs"`
BuildTimeout int `json:"build_timeout"`
AutoCancelPendingPipelines string `json:"auto_cancel_pending_pipelines"`
BuildCoverageRegex interface{} `json:"build_coverage_regex"`
CiConfigPath string `json:"ci_config_path"`
SharedWithGroups []interface{} `json:"shared_with_groups"`
OnlyAllowMergeIfPipelineSucceeds bool `json:"only_allow_merge_if_pipeline_succeeds"`
AllowMergeOnSkippedPipeline interface{} `json:"allow_merge_on_skipped_pipeline"`
RestrictUserDefinedVariables bool `json:"restrict_user_defined_variables"`
RequestAccessEnabled bool `json:"request_access_enabled"`
OnlyAllowMergeIfAllDiscussionsAreResolved bool `json:"only_allow_merge_if_all_discussions_are_resolved"`
RemoveSourceBranchAfterMerge bool `json:"remove_source_branch_after_merge"`
PrintingMergeRequestLinkEnabled bool `json:"printing_merge_request_link_enabled"`
MergeMethod string `json:"merge_method"`
SuggestionCommitMessage interface{} `json:"suggestion_commit_message"`
AutoDevopsEnabled bool `json:"auto_devops_enabled"`
AutoDevopsDeployStrategy string `json:"auto_devops_deploy_strategy"`
AutocloseReferencedIssues bool `json:"autoclose_referenced_issues"`
ExternalAuthorizationClassificationLabel string `json:"external_authorization_classification_label"`
RequirementsEnabled bool `json:"requirements_enabled"`
SecurityAndComplianceEnabled bool `json:"security_and_compliance_enabled"`
ComplianceFrameworks []interface{} `json:"compliance_frameworks"`
Permissions struct {
ProjectAccess struct {
AccessLevel int `json:"access_level"`
NotificationLevel int `json:"notification_level"`
} `json:"project_access"`
GroupAccess interface{} `json:"group_access"`
} `json:"permissions"`
}
type ResultGitlabRepoBranch struct {
Name string `json:"name"`
Merged bool `json:"merged"`
Protected bool `json:"protected"`
Default bool `json:"default"`
DevelopersCanPush bool `json:"developers_can_push"`
DevelopersCanMerge bool `json:"developers_can_merge"`
CanPush bool `json:"can_push"`
WebUrl string `json:"web_url"`
Commit struct {
AuthorEmail string `json:"author_email"`
AuthorName string `json:"author_name"`
AuthoredDate time.Time `json:"authored_date"`
CommittedDate time.Time `json:"committed_date"`
CommitterEmail string `json:"committer_email"`
CommitterName string `json:"committer_name"`
Id string `json:"id"`
ShortId string `json:"short_id"`
Title string `json:"title"`
Message string `json:"message"`
ParentIds []string `json:"parent_ids"`
} `json:"commit"`
}
type ResultGetGitlabHook struct {
Id int `json:"id"`
Url string `json:"url"`
CreatedAt time.Time `json:"created_at"`
PushEvents bool `json:"push_events"`
TagPushEvents bool `json:"tag_push_events"`
MergeRequestsEvents bool `json:"merge_requests_events"`
RepositoryUpdateEvents bool `json:"repository_update_events"`
EnableSslVerification bool `json:"enable_ssl_verification"`
ProjectId int `json:"project_id"`
IssuesEvents bool `json:"issues_events"`
ConfidentialIssuesEvents bool `json:"confidential_issues_events"`
NoteEvents bool `json:"note_events"`
ConfidentialNoteEvents bool `json:"confidential_note_events"`
PipelineEvents bool `json:"pipeline_events"`
WikiPageEvents bool `json:"wiki_page_events"`
DeploymentEvents bool `json:"deployment_events"`
JobEvents bool `json:"job_events"`
ReleasesEvents bool `json:"releases_events"`
PushEventsBranchFilter string `json:"push_events_branch_filter"`
}

bean/trigger.go (new file, 29 lines)

@@ -0,0 +1,29 @@
package bean
import "errors"
type TriggerParam struct {
Id string `json:"id"`
PipelineId string `json:"pipelineId"`
Types string `json:"types"`
Name string `json:"name"`
Desc string `json:"desc"`
Params string `json:"params"`
Enabled bool ` json:"enabled"`
}
func (c *TriggerParam) Check() error {
if c.PipelineId == "" {
return errors.New("流水线ID不能为空") // "the pipeline ID must not be empty"
}
if c.Types == "" {
return errors.New("触发器类型不能为空") // "the trigger type must not be empty"
}
if c.Name == "" {
return errors.New("触发器名称不能为空") // "the trigger name must not be empty"
}
if c.Params == "" {
return errors.New("触发器参数不能为空") // "the trigger parameters must not be empty"
}
return nil
}

bean/yml.go (new file, 167 lines)

@@ -0,0 +1,167 @@
package bean
import (
"encoding/json"
"errors"
"fmt"
"strings"
)
type Pipeline struct {
Version string `yaml:"version,omitempty" json:"version"`
Triggers map[string]*Trigger `yaml:"triggers,omitempty" json:"triggers"`
Vars map[string]string `yaml:"vars,omitempty" json:"vars"`
Stages []*Stage `yaml:"stages,omitempty" json:"stages"`
}
type Trigger struct {
AutoCancel bool `yaml:"autoCancel,omitempty" json:"autoCancel,omitempty"`
Timeout string `yaml:"timeout,omitempty" json:"timeout,omitempty"`
Branches *Condition `yaml:"branches,omitempty" json:"branches,omitempty"`
Tags *Condition `yaml:"tags,omitempty" json:"tags,omitempty"`
Paths *Condition `yaml:"paths,omitempty" json:"paths,omitempty"`
Notes *Condition `yaml:"notes,omitempty" json:"notes,omitempty"`
CommitMessages *Condition `yaml:"commitMessages,omitempty" json:"commitMessages,omitempty"`
}
type Condition struct {
Include []string `yaml:"include,omitempty" json:"include,omitempty"`
Exclude []string `yaml:"exclude,omitempty" json:"exclude,omitempty"`
}
type Stage struct {
Stage string `yaml:"stage" json:"stage"`
Name string `yaml:"name,omitempty" json:"name"`
DisplayName string `yaml:"displayName,omitempty" json:"displayName"`
Steps []*Step `yaml:"steps,omitempty" json:"steps"`
}
/*type Input struct {
Value string `yaml:"value"`
Required bool `yaml:"required"`
}*/
type Step struct {
Step string `yaml:"step" json:"step"`
DisplayName string `yaml:"displayName,omitempty" json:"displayName"`
Name string `yaml:"name,omitempty" json:"name"`
Input map[string]string `yaml:"input,omitempty" json:"input"`
Env map[string]string `yaml:"env,omitempty" json:"env"`
Commands interface{} `yaml:"commands,omitempty" json:"commands"`
Waits []string `yaml:"wait,omitempty" json:"wait"`
Image string `yaml:"image,omitempty" json:"image"`
Artifacts []*Artifact `yaml:"artifacts,omitempty" json:"artifacts"`
UseArtifacts []*UseArtifacts `yaml:"useArtifacts,omitempty" json:"useArtifacts"`
}
type Artifact struct {
Scope string `yaml:"scope,omitempty" json:"scope"`
Repository string `yaml:"repository,omitempty" json:"repository"`
Name string `yaml:"name,omitempty" json:"name"`
Path string `yaml:"path,omitempty" json:"path"`
}
type UseArtifacts struct {
Scope string `yaml:"scope" json:"scope"` //archive,pipeline,env
Repository string `yaml:"repository" json:"repository"` // archive, artifact repository ID
Name string `yaml:"name" json:"name"` //archive,pipeline,env
//IsForce bool `yaml:"isForce" json:"isForce"`
IsUrl bool `yaml:"isUrl" json:"isUrl"`
Alias string `yaml:"alias" json:"alias"`
Path string `yaml:"path" json:"path"` //archive,pipeline
FromStage string `yaml:"fromStage" json:"sourceStage"` //pipeline
FromStep string `yaml:"fromStep" json:"sourceStep"` //pipeline
}
func (c *Pipeline) ToJson() ([]byte, error) {
c.ConvertCmd()
return json.Marshal(c)
}
func (c *Pipeline) ConvertCmd() {
for _, stage := range c.Stages {
for _, step := range stage.Steps {
v := step.Commands
switch v.(type) {
case string:
step.Commands = v.(string)
case []interface{}:
ls := make([]string, 0)
for _, v1 := range v.([]interface{}) {
ls = append(ls, fmt.Sprintf("%v", v1))
}
step.Commands = ls
default:
step.Commands = fmt.Sprintf("%v", v)
}
}
}
}
func (c *Pipeline) Check() error {
stages := make(map[string]map[string]*Step)
if c.Stages == nil || len(c.Stages) <= 0 {
return errors.New("stages 为空")
}
for _, v := range c.Stages {
if v.Name == "" {
return errors.New("stages name 为空")
}
if v.Steps == nil || len(v.Steps) <= 0 {
return errors.New("step 为空")
}
if _, ok := stages[v.Name]; ok {
return errors.New(fmt.Sprintf("build stages.%s 重复", v.Name))
}
m := map[string]*Step{}
stages[v.Name] = m
for _, e := range v.Steps {
if strings.TrimSpace(e.Step) == "" {
return errors.New("step 插件为空")
}
if e.Name == "" {
return errors.New("step name 为空")
}
if _, ok := m[e.Name]; ok {
return errors.New(fmt.Sprintf("steps.%s 重复", e.Name))
}
m[e.Name] = e
}
}
return nil
}
//func (c *Pipeline) SkipTriggerRules(events string) bool {
// if events != "manual" {
// return true
// }
//
// if c.Triggers == nil || len(c.Triggers) <= 0 {
// logrus.Error("Triggers is empty")
// return false
// }
// switch events {
// case "push", "pr", "comment":
// default:
// logrus.Debugf("not match action:%v", events)
// return false
// }
// v, ok := c.Triggers[events]
// if !ok {
// logrus.Debugf("not match action: %v", events)
// return false
// }
// if v == nil {
// logrus.Debugf("%v trigger is empty",events)
// return false
// }
// if !skipCommitNotes(v.Notes, pb.Info.Note) {
// return false
// } else if !skipBranch(v.Branches, pb.Info.Repository.Branch) {
// return false
// } else if !skipCommitMessages(v.CommitMessages, pb.Info.CommitMessage) {
// return false
// } else {
// logrus.Debugf("%v skip", c.Name)
// return true
// }
//}
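A minimal sketch of how a pipeline YAML matching the structs above could be parsed and validated. It is not part of this commit; the sample pipeline content and the use of gopkg.in/yaml.v2 are assumptions made only for illustration.

package example

import (
	"fmt"

	"github.com/gokins-main/gokins/bean"
	"gopkg.in/yaml.v2" // assumed YAML library; the commit does not show which one gokins uses
)

const sample = `
version: "1.0"
stages:
  - stage: build
    name: build
    displayName: Build
    steps:
      - step: shell
        name: compile
        commands:
          - go build ./...
`

func parseSample() (*bean.Pipeline, error) {
	p := &bean.Pipeline{}
	if err := yaml.Unmarshal([]byte(sample), p); err != nil {
		return nil, err
	}
	// Check rejects empty or duplicate stage/step names; ToJson first runs
	// ConvertCmd to normalize Commands into a string or []string.
	if err := p.Check(); err != nil {
		return nil, err
	}
	bts, err := p.ToJson()
	if err != nil {
		return nil, err
	}
	fmt.Println(string(bts))
	return p, nil
}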

4
bindata.sh Normal file
View File

@ -0,0 +1,4 @@
# go get -u github.com/jteeuwen/go-bindata/...
go-bindata -o comm/migrate.go -pkg=comm -prefix migrates migrates/mysql/ migrates/sqlite/

1
bindui.sh Normal file
View File

@ -0,0 +1 @@
go run compressui.go

View File

@ -1,3 +1,3 @@
go build -o bin/gokins main.go
export CGO_ENABLED=0
go build -o ../bin/gokins main.go

View File

@ -1 +0,0 @@
go build -o bin/gokins.exe main.go

82
cmd/cmd.go Normal file
View File

@ -0,0 +1,82 @@
package cmd
import (
"github.com/gokins-main/core"
"github.com/gokins-main/gokins/comm"
"github.com/gokins-main/gokins/server"
hbtp "github.com/mgr9525/HyperByte-Transfer-Protocol"
"gopkg.in/alecthomas/kingpin.v2"
"os"
"os/exec"
"os/signal"
"syscall"
)
const Version = "0.1.1"
var app = kingpin.New("gokins", "A golang workflow application.")
func Run() {
regs()
kingpin.Version(Version)
kingpin.MustParse(app.Parse(os.Args[1:]))
}
func regs() {
app.Flag("web", "gokins web host").Default(":8030").StringVar(&comm.WebHost)
//app.Flag("hbtp", "gokins hbtp host").Default(":8031").StringVar(&comm.HbtpHost)
app.Flag("workdir", "gokins work path").Short('w').StringVar(&comm.WorkPath)
app.Flag("nupass", "can't update password").Hidden().BoolVar(&comm.NotUpPass)
cmd := app.Command("run", "run process").Default().
Action(run)
cmd.Flag("debug", "debug log show").BoolVar(&core.Debug)
cmd = app.Command("daemon", "run process background").
Action(start)
}
func getArgs() []string {
args := make([]string, 0)
args = append(args, "run")
if comm.WebHost != "" {
args = append(args, "--web")
args = append(args, comm.WebHost)
}
/*if comm.HbtpHost != "" {
args = append(args, "--hbtp")
args = append(args, comm.HbtpHost)
}*/
if comm.WorkPath != "" {
args = append(args, "--workdir")
args = append(args, comm.WorkPath)
}
if comm.NotUpPass {
args = append(args, "--nupass")
}
return args
}
func start(pc *kingpin.ParseContext) error {
args := getArgs()
fullpth, err := os.Executable()
if err != nil {
return err
}
println("start process")
cmd := exec.Command(fullpth, args...)
err = cmd.Start()
if err != nil {
return err
}
return nil
}
func run(pc *kingpin.ParseContext) error {
csig := make(chan os.Signal, 1)
signal.Notify(csig, os.Interrupt, syscall.SIGALRM)
go func() {
s := <-csig
hbtp.Debugf("get signal(%d):%s", s, s.String())
comm.Cancel()
}()
if core.Debug {
hbtp.Debug = true
}
return server.Run()
}
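For reference, a typical invocation with the flags registered above would be "gokins run --web :8030 --workdir /data/gokins --debug" (the work directory path here is illustrative), while "gokins daemon" re-executes the same binary in the background with the arguments rebuilt by getArgs().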

37
comm/app.go Normal file
View File

@ -0,0 +1,37 @@
package comm
import (
"context"
"github.com/boltdb/bolt"
"github.com/gin-gonic/gin"
hbtp "github.com/mgr9525/HyperByte-Transfer-Protocol"
"xorm.io/xorm"
)
var (
Ctx context.Context
cncl context.CancelFunc
)
var (
Cfg = Config{}
Db *xorm.Engine
BCache *bolt.DB
WebEgn *gin.Engine
HbtpEgn *hbtp.Engine
IsMySQL = false
Installed = false
NotUpPass = false
WorkPath = ""
WebHost = ""
//HbtpHost = ""
)
func init() {
Ctx, cncl = context.WithCancel(context.Background())
}
func Cancel() {
if cncl != nil {
cncl()
}
}

163
comm/cache.go Normal file
View File

@ -0,0 +1,163 @@
package comm
import (
"bytes"
"encoding/json"
"errors"
"github.com/boltdb/bolt"
hbtp "github.com/mgr9525/HyperByte-Transfer-Protocol"
"github.com/sirupsen/logrus"
"time"
)
var mainCacheBucket = []byte("mainCacheBucket")
func CacheSet(key string, data []byte, outm ...time.Duration) error {
if BCache == nil {
return errors.New("cache not init")
}
err := BCache.Update(func(tx *bolt.Tx) error {
var err error
bk := tx.Bucket(mainCacheBucket)
if bk == nil {
bk, err = tx.CreateBucket(mainCacheBucket)
if err != nil {
return err
}
}
if data == nil {
return bk.Delete([]byte(key))
}
buf := &bytes.Buffer{}
var outms []byte
if len(outm) > 0 {
outms = []byte(time.Now().Add(outm[0]).Format(time.RFC3339Nano))
} else {
outms = []byte(time.Now().Add(time.Hour).Format(time.RFC3339Nano))
}
buf.Write(hbtp.BigIntToByte(int64(len(outms)), 4))
buf.Write(outms)
buf.Write(data)
return bk.Put([]byte(key), buf.Bytes())
})
return err
}
func CacheSets(key string, data interface{}, outm ...time.Duration) error {
if BCache == nil {
return errors.New("cache not init")
}
if data == nil {
return CacheSet(key, nil)
}
bts, err := json.Marshal(data)
if err != nil {
return err
}
return CacheSet(key, bts, outm...)
}
func parseCacheData(bts []byte) []byte {
if bts == nil {
return nil
}
ln := int(hbtp.BigByteToInt(bts[:4]))
tms := string(bts[4 : ln+4])
outm, err := time.Parse(time.RFC3339Nano, tms)
if err != nil {
return nil
}
if time.Since(outm).Milliseconds() < 0 {
return bts[4+ln:]
}
return nil
}
var KeyNotFoundErr = errors.New("key not found")
var KeyOutTimeErr = errors.New("key is timeout")
func CacheGet(key string) ([]byte, error) {
if BCache == nil {
return nil, errors.New("cache not init")
}
var rt []byte
err := BCache.View(func(tx *bolt.Tx) error {
bk := tx.Bucket(mainCacheBucket)
if bk == nil {
return KeyNotFoundErr
}
bts := bk.Get([]byte(key))
if bts == nil {
return KeyNotFoundErr
}
rt = parseCacheData(bts)
if rt == nil {
bk.Delete([]byte(key))
return KeyOutTimeErr
}
return nil
})
if time.Since(mainCacheClearTime).Hours() > 30 {
go mainCacheClear()
}
return rt, err
}
func CacheGets(key string, data interface{}) error {
if BCache == nil {
return errors.New("cache not init")
}
if data == nil {
return errors.New("data not be nil")
}
bts, err := CacheGet(key)
if err != nil {
return err
}
return json.Unmarshal(bts, data)
}
func CacheFlush() error {
if BCache == nil {
return errors.New("cache not init")
}
err := BCache.Update(func(tx *bolt.Tx) error {
return tx.DeleteBucket(mainCacheBucket)
})
return err
}
var mainCacheClearTime time.Time
func mainCacheClear() {
defer func() {
if err := recover(); err != nil {
logrus.Errorf("mainCacheClear recover err:%v", err)
}
}()
if BCache == nil {
return
}
/*if time.Now().Hour()!=3|| time.Since(mainCacheClearTime).Hours() < 30 {
return
}*/
mainCacheClearTime = time.Now()
/*if err := CacheFlush(); err != nil {
logrus.Errorf("mainCacheClear err:%v", err)
}*/
err := BCache.Update(func(tx *bolt.Tx) error {
bk := tx.Bucket(mainCacheBucket)
if bk == nil {
return nil
}
bk.ForEach(func(k, v []byte) error {
data := parseCacheData(v)
if data == nil {
return bk.Delete(k)
}
return nil
})
return nil
})
if err != nil {
logrus.Errorf("mainCacheClear err:%v", err)
}
}
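A usage sketch for the cache helpers above, assuming comm.BCache has already been opened elsewhere during server startup; the sessionInfo type and the "sess:abc" key are illustrative only.

package example

import (
	"fmt"
	"time"

	"github.com/gokins-main/gokins/comm"
)

// sessionInfo is a hypothetical payload; any JSON-serializable value works.
type sessionInfo struct {
	UserId string `json:"userId"`
	Nick   string `json:"nick"`
}

func cacheExample() error {
	// CacheSets marshals the value and CacheSet stores it prefixed with a
	// 4-byte big-endian length plus the RFC3339Nano expiry timestamp.
	if err := comm.CacheSets("sess:abc", &sessionInfo{UserId: "u1", Nick: "tester"}, time.Minute*10); err != nil {
		return err
	}
	got := &sessionInfo{}
	if err := comm.CacheGets("sess:abc", got); err != nil {
		// comm.KeyNotFoundErr and comm.KeyOutTimeErr distinguish a miss from an expired entry.
		return err
	}
	fmt.Println(got.UserId, got.Nick)
	return nil
}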

17
comm/config.go Normal file
View File

@ -0,0 +1,17 @@
package comm
type Config struct {
Server struct {
Host string `yaml:"host"` // public (external) access address
LoginKey string `yaml:"loginKey"`
RunLimit int `yaml:"runLimit"`
HbtpHost string `yaml:"hbtpHost"`
Secret string `yaml:"secret"`
Shells []string `yaml:"shells"`
DownToken string `yaml:"DownToken"`
} `yaml:"server"`
Datasource struct {
Driver string `yaml:"driver"`
Url string `yaml:"url"`
} `yaml:"datasource"`
}
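An illustrative app.yml matching the struct above; every value below is an assumption, since the commit does not include a sample config file:

server:
  host: https://gokins.example.com
  loginKey: changeme
  runLimit: 5
  hbtpHost: ":8031"
  secret: webhook-secret
  shells:
    - sh
    - bash
  DownToken: changeme
datasource:
  driver: mysql
  url: root:password@tcp(127.0.0.1:3306)/gokins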

View File

@ -1,10 +0,0 @@
package comm
const (
TimeZero = -62135596800
TimeFmt = "2006-01-02 15:04:05"
TimeFmts = "2006-01-02"
TimeFmtm = "2006-01"
TimeFmtt = "20060102150405"
TimeFmtpck = "2006-01-02T15:04:05.999999999Z"
)

View File

@ -1,28 +1,130 @@
package comm
import (
"bytes"
"gokins/model"
"errors"
"fmt"
"reflect"
"strings"
"github.com/go-xorm/xorm"
_ "github.com/mattn/go-sqlite3"
"github.com/gokins-main/gokins/bean"
"xorm.io/builder"
"xorm.io/xorm"
)
func InitDb() error {
db, err := xorm.NewEngine("sqlite3", Dir+"/db.dat")
if err != nil {
return err
type SesFuncHandler = func(ses *xorm.Session)
func findCount(cds builder.Cond, data interface{}) (int64, error) {
if data == nil {
return 0, errors.New("needs a pointer to a slice")
}
Db = db
isext, err := Db.IsTableExist(model.SysUser{})
if err == nil && !isext {
_, err := Db.Import(bytes.NewBufferString(sqls))
if err != nil {
println("Db.Import err:" + err.Error())
of := reflect.TypeOf(data)
if of.Kind() == reflect.Ptr {
of = of.Elem()
}
if of.Kind() == reflect.Slice {
sty := of.Elem()
if sty.Kind() == reflect.Ptr {
sty = sty.Elem()
}
//e:=&models.SysUser{}
//e.Times=time.Now()
//db.Cols("times").Where("xid=?","admin").Update(e)
pv := reflect.New(sty)
ses := Db.NewSession()
defer ses.Close()
return ses.Where(cds).Count(pv.Interface())
}
return nil
return 0, errors.New("GetCount err : not found any data")
}
func FindPage(ses *xorm.Session, ls interface{}, page int64, size ...int64) (*bean.Page, error) {
count, err := findCount(ses.Conds(), ls)
if err != nil {
return nil, err
}
return findPages(ses, ls, count, page, size...)
}
func findPages(ses *xorm.Session, ls interface{}, count, page int64, size ...int64) (*bean.Page, error) {
var pageno int64 = 1
var sizeno int64 = 10
var pagesno int64 = 0
//var count=c.FindCount(pars)
if page > 0 {
pageno = page
}
if len(size) > 0 && size[0] > 0 {
sizeno = size[0]
}
start := (pageno - 1) * sizeno
err := ses.Limit(int(sizeno), int(start)).Find(ls)
if err != nil {
return nil, err
}
pagest := count / sizeno
if count%sizeno > 0 {
pagesno = pagest + 1
} else {
pagesno = pagest
}
return &bean.Page{
Page: pageno,
Pages: pagesno,
Size: sizeno,
Total: count,
Data: ls,
}, nil
}
func FindPages(gen *bean.PageGen, ls interface{}, page int64, size ...int64) (*bean.Page, error) {
var count int64
counts := "count(*)"
if gen.CountCols != "" {
counts = fmt.Sprintf("count(%s)", gen.CountCols)
}
sqls := strings.Replace(gen.SQL, "{{select}}", counts, 1)
sqls = strings.Replace(sqls, "{{limit}}", "", 1)
_, err := Db.SQL(sqls, gen.Args...).Get(&count)
if err != nil {
return nil, err
}
var pageno int64 = 1
var sizeno int64 = 10
var pagesno int64 = 0
//var count=c.FindCount(pars)
if page > 0 {
pageno = page
}
if len(size) > 0 && size[0] > 0 {
sizeno = size[0]
}
start := (pageno - 1) * sizeno
starts := ""
if start > 0 {
starts = fmt.Sprintf("%d,", start)
}
ses := Db.NewSession()
defer ses.Close()
sqls = strings.Replace(gen.SQL, "{{select}}", gen.FindCols, 1)
if strings.Contains(sqls, "{{limit}}") {
sqls = strings.Replace(sqls, "{{limit}}", fmt.Sprintf("LIMIT %s%d", starts, sizeno), 1)
} else {
sqls += fmt.Sprintf("\nLIMIT %s%d", starts, sizeno)
}
err = ses.SQL(sqls, gen.Args...).Find(ls)
if err != nil {
return nil, err
}
pagest := count / sizeno
if count%sizeno > 0 {
pagesno = pagest + 1
} else {
pagesno = pagest
}
return &bean.Page{
Page: pageno,
Pages: pagesno,
Size: sizeno,
Total: count,
Data: ls,
}, nil
}
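A sketch of how the {{select}} and {{limit}} placeholders in FindPages are intended to be filled in. The query, table and columns are illustrative, and bean.PageGen itself is not shown in this diff, so its field names below (SQL, Args, FindCols, CountCols) are inferred from the usage above.

package example

import (
	"github.com/gokins-main/gokins/bean"
	"github.com/gokins-main/gokins/comm"
	"github.com/gokins-main/gokins/model"
)

func listBuilds(pipelineId string, page int64) (*bean.Page, error) {
	ls := make([]*model.TBuild, 0)
	gen := &bean.PageGen{
		SQL:       "select {{select}} from t_build where pipeline_id=? order by created desc {{limit}}",
		Args:      []interface{}{pipelineId},
		FindCols:  "*",
		CountCols: "id",
	}
	// {{select}} becomes count(id) for the count query and * for the data query;
	// {{limit}} is replaced with "LIMIT <offset>,<size>" (MySQL-style).
	return comm.FindPages(gen, &ls, page, 20)
}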

View File

@ -1,208 +0,0 @@
package comm
const sqls = `
/*
Navicat Premium Data Transfer
Source Server : gokinsdb
Source Server Type : SQLite
Source Server Version : 3030001
Source Schema : main
Target Server Type : SQLite
Target Server Version : 3030001
File Encoding : 65001
Date: 30/10/2020 11:08:09
*/
PRAGMA foreign_keys = false;
-- ----------------------------
-- Table structure for sys_param
-- ----------------------------
DROP TABLE IF EXISTS "sys_param";
CREATE TABLE "sys_param" (
"id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,
"key" varchar,
"cont" blob,
"times" datetime
);
-- ----------------------------
-- Table structure for sys_user
-- ----------------------------
DROP TABLE IF EXISTS "sys_user";
CREATE TABLE "sys_user" (
"id" integer,
"xid" text NOT NULL,
"name" text NOT NULL,
"pass" text,
"nick" text,
"phone" text,
"times" datetime,
"logintm" datetime,
"fwtm" datetime,
"avat" text,
PRIMARY KEY ("id")
);
-- ----------------------------
-- Table structure for t_model
-- ----------------------------
DROP TABLE IF EXISTS "t_model";
CREATE TABLE "t_model" (
"id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,
"uid" varchar,
"title" text,
"desc" text,
"times" datetime,
"del" integer DEFAULT 0,
"envs" text,
"wrkdir" text,
"clrdir" integer DEFAULT 0
);
-- ----------------------------
-- Table structure for t_model_run
-- ----------------------------
DROP TABLE IF EXISTS "t_model_run";
CREATE TABLE "t_model_run" (
"id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,
"uid" varchar,
"tid" integer,
"times" datetime,
"timesd" datetime,
"state" integer,
"errs" text,
"tgid" integer,
"tgtyps" text
);
-- ----------------------------
-- Table structure for t_output
-- ----------------------------
DROP TABLE IF EXISTS "t_output";
CREATE TABLE "t_output" (
"id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,
"type" varchar(50),
"tid" integer,
"output" text,
"times" datetime
);
-- ----------------------------
-- Table structure for t_plugin
-- ----------------------------
DROP TABLE IF EXISTS "t_plugin";
CREATE TABLE "t_plugin" (
"id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,
"tid" integer NOT NULL,
"title" text,
"type" integer DEFAULT 0,
"para" text,
"cont" text,
"times" datetime,
"sort" integer DEFAULT 100,
"del" integer DEFAULT 0,
"exend" integer DEFAULT 0
);
-- ----------------------------
-- Table structure for t_plugin_run
-- ----------------------------
DROP TABLE IF EXISTS "t_plugin_run";
CREATE TABLE "t_plugin_run" (
"id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,
"pid" integer,
"mid" integer,
"tid" integer,
"times" datetime,
"timesd" datetime,
"state" integer,
"excode" integer
);
-- ----------------------------
-- Table structure for t_trigger
-- ----------------------------
DROP TABLE IF EXISTS "t_trigger";
CREATE TABLE "t_trigger" (
"id" integer NOT NULL,
"uid" varchar,
"types" varchar,
"title" varchar,
"desc" text,
"times" date,
"config" text,
"del" integer DEFAULT 0,
"enable" integer DEFAULT 0,
"errs" text,
"mid" integer,
"meid" integer,
"opt1" text,
"opt2" text,
"opt3" text,
PRIMARY KEY ("id")
);
-- ----------------------------
-- Auto increment value for sys_param
-- ----------------------------
-- ----------------------------
-- Indexes structure for table sys_param
-- ----------------------------
CREATE INDEX "main"."key"
ON "sys_param" (
"key" ASC
);
-- ----------------------------
-- Indexes structure for table sys_user
-- ----------------------------
CREATE INDEX "main"."IDX_sys_user_phone"
ON "sys_user" (
"phone" ASC
);
CREATE INDEX "main"."name"
ON "sys_user" (
"name" ASC
);
CREATE INDEX "main"."xid"
ON "sys_user" (
"xid" ASC
);
-- ----------------------------
-- Auto increment value for t_model
-- ----------------------------
-- ----------------------------
-- Auto increment value for t_model_run
-- ----------------------------
-- ----------------------------
-- Auto increment value for t_output
-- ----------------------------
-- ----------------------------
-- Indexes structure for table t_output
-- ----------------------------
CREATE INDEX "main"."kv"
ON "t_output" (
"type" ASC,
"tid" ASC
);
-- ----------------------------
-- Auto increment value for t_plugin
-- ----------------------------
-- ----------------------------
-- Auto increment value for t_plugin_run
-- ----------------------------
PRAGMA foreign_keys = true;
`

141
comm/migrate.go Normal file

File diff suppressed because one or more lines are too long

48
comm/thirdapi.go Normal file
View File

@ -0,0 +1,48 @@
package comm
import (
"github.com/gokins-main/gokins/thirdapi"
"github.com/gokins-main/gokins/thirdapi/giteaapi"
"github.com/gokins-main/gokins/thirdapi/giteeapi"
"github.com/gokins-main/gokins/thirdapi/giteepremiumapi"
"github.com/gokins-main/gokins/thirdapi/githubapi"
"github.com/gokins-main/gokins/thirdapi/gitlabapi"
"github.com/sirupsen/logrus"
)
var (
apiClient *thirdapi.Client
)
func GetThirdApi(s string, host string) (*thirdapi.Client, error) {
if apiClient == nil {
switch s {
case "gitee":
apiClient = giteeapi.NewDefault()
case "github":
apiClient = githubapi.NewDefault()
case "gitalb":
client, err := gitlabapi.New(host + "/api/v4")
if err != nil {
return nil, err
}
apiClient = client
case "giteepremium":
client, err := giteepremiumapi.New(host + "/api/v5")
if err != nil {
return nil, err
}
apiClient = client
case "gitea":
client, err := giteaapi.New(host + "/api/v1")
if err != nil {
return nil, err
}
apiClient = client
default:
logrus.Debug("GetThirdApi default : 'github' ")
apiClient = githubapi.NewDefault()
}
}
return apiClient, nil
}
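A brief usage note: the client is created lazily and cached in the package-level apiClient, so the driver and host passed on the first call are the ones kept for the process lifetime. An illustrative call (the host value is an assumption):

package example

import "github.com/gokins-main/gokins/comm"

func giteaClient() error {
	// "gitea" routes to giteaapi.New(host + "/api/v1"); other drivers follow the switch above.
	_, err := comm.GetThirdApi("gitea", "https://gitea.example.com")
	return err
}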

3
comm/uis.go Normal file

File diff suppressed because one or more lines are too long

View File

@ -1,17 +0,0 @@
package comm
import (
"github.com/gin-gonic/gin"
"github.com/go-xorm/xorm"
)
var (
Dir string
Path string
Host string
NoUppass bool
Gin *gin.Engine
Db *xorm.Engine
RunTaskLen int = 5
)

File diff suppressed because one or more lines are too long

48
compressui.go Normal file
View File

@ -0,0 +1,48 @@
package main
import (
"encoding/base64"
"flag"
"fmt"
"github.com/gokins-main/core/utils"
"io/ioutil"
"os"
"path/filepath"
)
var pth string
func main() {
flag.StringVar(&pth, "d", "", "ui dir")
flag.Parse()
if pth == "" {
pth = "../web/dist"
}
err := gengo()
if err != nil {
println("bdzip err:" + err.Error())
}
}
func gengo() error {
zipfl := filepath.Join(utils.HomePath(), "dist.zip")
os.RemoveAll(zipfl)
defer os.RemoveAll(zipfl)
err := utils.Zip(pth, zipfl, true)
if err != nil {
return err
}
bts, err := ioutil.ReadFile(zipfl)
if err != nil {
return err
}
cont := base64.StdEncoding.EncodeToString(bts)
err = ioutil.WriteFile("comm/uis.go",
[]byte(fmt.Sprintf("package comm\n\nconst StaticPkg = \"%s\"", cont)),
0644)
if err != nil {
return err
}
println("ui insert go ok!!!")
return nil
}
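For reference, regenerating the embedded UI would look like "go run compressui.go -d ../web/dist" (matching the default path above, and what bindui.sh invokes): the script zips the dist directory, base64-encodes the archive, and rewrites comm/uis.go with the StaticPkg constant.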

View File

@ -1,513 +0,0 @@
package core
import (
"encoding/json"
"errors"
"fmt"
"reflect"
"strconv"
"strings"
ruisUtil "github.com/mgr9525/go-ruisutil"
)
// Maps2Struct converts a ruisUtil.Map into the target struct
func Maps2Struct(mp *ruisUtil.Map, dist interface{}) error {
return Map2Struct(mp.Map(), dist)
}
// Map2Struct converts a map[string]interface{} into the target struct
func Map2Struct(mp map[string]interface{}, dist interface{}) (rterr error) {
defer func() {
if errs := recover(); errs != nil {
rterr = errors.New(fmt.Sprintf("%v", errs))
}
}()
if mp == nil || dist == nil {
return errors.New("map/dist is nil")
}
tf := reflect.TypeOf(dist)
vf := reflect.ValueOf(dist)
if vf.IsNil() {
return errors.New("dist is nil")
}
if tf.Kind() != reflect.Ptr {
return errors.New("dist is not pointer")
}
tf = tf.Elem()
vf = vf.Elem()
if tf.Kind() != reflect.Struct {
return errors.New("dist is not struct")
}
for i := 0; i < tf.NumField(); i++ {
tfid := tf.Field(i)
v, ok := mp[tfid.Name]
if !ok {
name := strings.Split(tfid.Tag.Get("json"), ",")[0]
if name == "" {
continue
}
v, ok = mp[name]
if !ok {
continue
}
}
tfd := tfid.Type
vfd := vf.FieldByIndex(tfid.Index)
if !vfd.CanSet() {
continue
}
if reflect.TypeOf(v).AssignableTo(tfd) {
vfd.Set(reflect.ValueOf(v))
continue
}
tfdc := tfd
vfdc := vfd
if tfd.Kind() == reflect.Ptr {
tfdc = tfd.Elem()
if vfd.Elem().Kind() == reflect.Invalid {
nv := reflect.New(tfdc)
vfd.Set(nv)
}
vfdc = vfd.Elem()
}
switch tfdc.Kind() {
case reflect.String:
vfdc.SetString(getString(v))
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
vfdc.SetInt(getInt(v))
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
vfdc.SetUint(getUint(v))
case reflect.Float32, reflect.Float64:
vfdc.SetFloat(getFloat(v))
case reflect.Bool:
vfdc.SetBool(getBool(v))
case reflect.Struct:
mpd, ok := v.(map[string]interface{})
if ok {
if err := Map2Struct(mpd, vfdc.Addr().Interface()); err != nil {
println(err.Error())
}
}
case reflect.Slice:
ls, err := Obj2Slice(v, tfdc)
if err != nil {
println(err.Error())
}
vfd.Set(ls)
default:
setValue(vfdc, v)
}
}
return nil
}
// pbStruct2Map: p is a pointer to a protobuf-generated struct
func pbStruct2Map(p interface{}) map[string]interface{} {
res := map[string]interface{}{}
if p == nil {
return res
}
t := reflect.TypeOf(p)
v := reflect.ValueOf(p)
if t.Kind() == reflect.Ptr {
if v.Elem().Kind() == reflect.Invalid {
return res
}
v = reflect.Indirect(v)
t = t.Elem()
}
for i := 0; i < t.NumField(); i++ {
tag := t.Field(i).Tag.Get("json")
if tag == "" || tag == "-" {
continue
}
tags := strings.Split(tag, ",")
if len(tags) == 0 {
continue
}
fValue := v.Field(i)
fType := t.Field(i).Type.Kind()
if fType == reflect.Ptr {
if fValue.Kind() != reflect.Invalid {
res[tags[0]] = pbStruct2Map(fValue.Interface())
continue
}
} else if fType == reflect.Slice {
lenth := fValue.Len()
if lenth > 0 {
out := make([]interface{}, lenth)
for j := 0; j < lenth; j++ {
tmpv := fValue.Index(j).Interface()
if fValue.Index(j).Kind() == reflect.Ptr {
out[j] = pbStruct2Map(tmpv)
} else {
out[j] = tmpv
}
}
res[tags[0]] = out
} else {
res[tags[0]] = []interface{}{}
}
continue
}
res[tags[0]] = fValue.Interface()
}
return res
}
// Struct2Struct copies matching fields from src into dist
func Struct2Struct(src interface{}, dist interface{}) (rterr error) {
dugName := ""
defer func() {
if errs := recover(); errs != nil {
rterr = errors.New(fmt.Sprintf("dugName=%s, errs=%+v", dugName, errs))
}
}()
if src == nil {
return nil
}
if dist == nil {
return errors.New("dist is nil")
}
tf := reflect.TypeOf(dist)
vf := reflect.ValueOf(dist)
if vf.IsNil() {
return errors.New("dist is nil")
}
if tf.Kind() != reflect.Ptr {
return errors.New("dist is not pointer")
}
tf = tf.Elem()
vf = vf.Elem()
if tf.Kind() != reflect.Struct {
return errors.New("dist is not struct")
}
tf1 := reflect.TypeOf(src)
vf1 := reflect.ValueOf(src)
if vf1.IsNil() {
return nil
}
if tf1.Kind() == reflect.Ptr {
tf1 = tf1.Elem()
vf1 = vf1.Elem()
}
if tf1.Kind() != reflect.Struct {
return errors.New("src is not struct")
}
for i := 0; i < tf.NumField(); i++ {
tfid := tf.Field(i)
tf1fd, ok := tf1.FieldByName(tfid.Name)
if !ok {
lws := strings.ToLower(tfid.Name)
tags := strings.Split(tfid.Tag.Get("conv"), ",")
if lws == "id" {
tags = []string{"ID", "Id", "id"}
}
if strings.Contains(lws, "id") {
tags = append(tags, strings.ReplaceAll(tfid.Name, "Id", "ID"))
tags = append(tags, strings.ReplaceAll(tfid.Name, "ID", "Id"))
}
for _, v := range tags {
if v == "" {
continue
}
tf1fd, ok = tf1.FieldByName(v)
if ok {
break
}
}
if !ok {
continue
}
}
dugName = tf1fd.Name
vf1fd := vf1.FieldByIndex(tf1fd.Index)
if !vf1fd.CanInterface() {
continue
}
v := vf1fd.Interface()
tfd := tfid.Type
vfd := vf.FieldByIndex(tfid.Index)
if !vfd.CanSet() {
continue
}
if tf1fd.Type.AssignableTo(tfd) {
vfd.Set(vf1fd)
continue
}
tfdc := tfd
vfdc := vfd
if tfd.Kind() == reflect.Ptr {
tfdc = tfd.Elem()
if vf1fd.Elem().Kind() == reflect.Invalid {
continue
}
if vfd.Elem().Kind() == reflect.Invalid {
nv := reflect.New(tfdc)
vfd.Set(nv)
}
vfdc = vfd.Elem()
}
switch vfdc.Kind() {
case reflect.String:
vfdc.SetString(getString(v))
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
vfdc.SetInt(getInt(v))
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
vfdc.SetUint(getUint(v))
case reflect.Float32, reflect.Float64:
vfdc.SetFloat(getFloat(v))
case reflect.Bool:
vfdc.SetBool(getBool(v))
case reflect.Struct:
if err := Struct2Struct(v, vfdc.Addr().Interface()); err != nil {
println(err.Error())
}
case reflect.Slice:
ls, err := Obj2Slice(v, tfdc)
if err != nil {
println(err.Error())
}
vfdc.Set(ls)
default:
setValue(vfd, v)
}
}
return nil
}
func setValue(vf reflect.Value, v interface{}) {
defer func() {
if errs := recover(); errs != nil {
println("setValue name:%s,err=%s", vf.String(), errs)
}
}()
vf.Set(reflect.ValueOf(v))
}
func getBool(v interface{}) bool {
vf := reflect.ValueOf(v)
switch vf.Kind() {
case reflect.Bool:
return v.(bool)
case reflect.String:
return v.(string) == "true"
}
return false
}
func getString(v interface{}) string {
vf := reflect.ValueOf(v)
switch vf.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
return fmt.Sprintf("%d", v)
case reflect.Float32, reflect.Float64:
return fmt.Sprintf("%f", v)
}
return fmt.Sprintf("%v", v)
}
func getInt(v interface{}) int64 {
vf := reflect.ValueOf(v)
switch vf.Kind() {
case reflect.Int:
return int64(v.(int))
case reflect.Int8:
return int64(v.(int8))
case reflect.Int16:
return int64(v.(int16))
case reflect.Int32:
return int64(v.(int32))
case reflect.Int64:
return v.(int64)
case reflect.Uint:
return int64(v.(uint))
case reflect.Uint8:
return int64(v.(uint8))
case reflect.Uint16:
return int64(v.(uint16))
case reflect.Uint32:
return int64(v.(uint32))
case reflect.Uint64:
return int64(v.(uint64))
case reflect.Float32:
return int64(v.(float32))
case reflect.Float64:
return int64(v.(float64))
case reflect.String:
vc, _ := strconv.ParseInt(v.(string), 10, 64)
return vc
}
return 0
}
func getUint(v interface{}) uint64 {
vf := reflect.ValueOf(v)
switch vf.Kind() {
case reflect.Int:
return uint64(v.(int))
case reflect.Int8:
return uint64(v.(int8))
case reflect.Int16:
return uint64(v.(int16))
case reflect.Int32:
return uint64(v.(int32))
case reflect.Int64:
return uint64(v.(int64))
case reflect.Uint:
return uint64(v.(uint))
case reflect.Uint8:
return uint64(v.(uint8))
case reflect.Uint16:
return uint64(v.(uint16))
case reflect.Uint32:
return uint64(v.(uint32))
case reflect.Uint64:
return v.(uint64)
case reflect.Float32:
return uint64(v.(float32))
case reflect.Float64:
return uint64(v.(float64))
case reflect.String:
vc, _ := strconv.ParseUint(v.(string), 10, 64)
return vc
}
return 0
}
func getFloat(v interface{}) float64 {
vf := reflect.ValueOf(v)
switch vf.Kind() {
case reflect.Int:
return float64(v.(int))
case reflect.Int8:
return float64(v.(int8))
case reflect.Int16:
return float64(v.(int16))
case reflect.Int32:
return float64(v.(int32))
case reflect.Int64:
return float64(v.(int64))
case reflect.Uint:
return float64(v.(uint))
case reflect.Uint8:
return float64(v.(uint8))
case reflect.Uint16:
return float64(v.(uint16))
case reflect.Uint32:
return float64(v.(uint32))
case reflect.Uint64:
return float64(v.(uint64))
case reflect.Float32:
return float64(v.(float32))
case reflect.Float64:
return v.(float64)
case reflect.String:
vc, _ := strconv.ParseFloat(v.(string), 64)
return vc
}
return 0
}
// Bytes2Struct unmarshals JSON bytes into the target struct via Map2Struct
func Bytes2Struct(bts []byte, dist interface{}) error {
mp := make(map[string]interface{})
err := json.Unmarshal(bts, &mp)
if err != nil {
return err
}
return Map2Struct(mp, dist)
}
// Obj2Slice converts a slice value into a slice of the given reflect.Type
func Obj2Slice(obj interface{}, dtf reflect.Type) (ret reflect.Value, rterr error) {
defer func() {
if errs := recover(); errs != nil {
rterr = errors.New(fmt.Sprintf("dtfName=%s, errs=%+v", dtf.Name(), errs))
}
}()
objs := reflect.ValueOf(obj)
if objs.Kind() != reflect.Slice {
return reflect.Value{}, errors.New("objs is not slice")
}
dist := reflect.MakeSlice(dtf, 0, 0)
dtf1 := dtf.Elem()
dtf2 := dtf1
if dtf1.Kind() == reflect.Ptr {
dtf2 = dtf1.Elem()
}
for i := 0; i < objs.Len(); i++ {
valf := objs.Index(i)
val := valf.Interface()
nv := reflect.New(dtf2)
tf := reflect.TypeOf(val)
vf := reflect.ValueOf(val)
if tf.Kind() == reflect.Ptr {
tf = tf.Elem()
if vf.Elem().Kind() == reflect.Invalid {
continue
}
vf = vf.Elem()
}
switch dtf2.Kind() {
case reflect.String:
nv = nv.Elem()
nv.SetString(getString(val))
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
nv = nv.Elem()
nv.SetInt(getInt(val))
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
nv = nv.Elem()
nv.SetUint(getUint(val))
case reflect.Float32, reflect.Float64:
nv = nv.Elem()
nv.SetFloat(getFloat(val))
case reflect.Bool:
nv = nv.Elem()
nv.SetBool(getBool(val))
case reflect.Struct:
mpd, ok := val.(map[string]interface{})
if ok {
if err := Map2Struct(mpd, nv.Interface()); err != nil {
println(err.Error())
}
} else {
if err := Struct2Struct(val, nv.Interface()); err != nil {
println(err.Error())
}
}
case reflect.Slice:
ls, err := Obj2Slice(val, dtf2)
if err != nil {
println(err.Error())
}
nv.Set(ls)
default:
nv = valf
}
dist = reflect.Append(dist, nv)
}
return dist, nil
}
/*// Obj2Slice converts (JSON round-trip variant)
func Obj2Slice(obj interface{}, dist interface{}) error {
bts, err := json.Marshal(obj)
if err != nil {
return err
}
err = json.Unmarshal(bts, dist)
if err != nil {
return err
}
return nil
}*/

View File

@ -1,157 +0,0 @@
package core
import (
"net/http"
"net/url"
"reflect"
"strings"
"time"
"github.com/dgrijalva/jwt-go"
"github.com/gin-gonic/gin"
ruisUtil "github.com/mgr9525/go-ruisutil"
)
const cookieName = "gokinsk"
func BindMapJSON(c *gin.Context) (*ruisUtil.Map, error) {
pars := ruisUtil.NewMap()
err := c.BindJSON(pars)
return pars, err
}
func CreateToken(p *jwt.MapClaims, tmout time.Duration) (string, error) {
claims := *p
claims["times"] = time.Now().Format(time.RFC3339Nano)
if tmout > 0 {
claims["timeout"] = time.Now().Add(tmout).Format(time.RFC3339Nano)
}
token := jwt.NewWithClaims(jwt.SigningMethodHS512, claims)
tokens, err := token.SignedString([]byte(JwtKey))
if err != nil {
return "", err
}
return tokens, nil
}
func SetToken(c *gin.Context, p *jwt.MapClaims, rem bool, doman ...string) (string, error) {
tmout := time.Hour * 5
if rem {
tmout = time.Hour * 24 * 5
}
tokens, err := CreateToken(p, tmout)
if err != nil {
return "", err
}
cke := http.Cookie{Name: cookieName, Value: tokens, HttpOnly: false}
if JwtCookiePath != "" {
cke.Path = JwtCookiePath
}
if len(doman) > 0 {
cke.Domain = doman[0]
}
cke.MaxAge = 60 * 60 * 5
if rem {
cke.MaxAge = 60 * 60 * 24 * 5
}
c.Header("Set-Cookie", cke.String())
return tokens, nil
}
func ClearToken(c *gin.Context, doman ...string) {
cke := http.Cookie{Name: cookieName, Value: "", HttpOnly: false}
if JwtCookiePath != "" {
cke.Path = JwtCookiePath
}
if len(doman) > 0 {
cke.Domain = doman[0]
}
cke.MaxAge = -1
c.Header("Set-Cookie", cke.String())
}
var secret = func(token *jwt.Token) (interface{}, error) {
return []byte(JwtKey), nil
}
func GetToken(c *gin.Context) jwt.MapClaims {
tks := ""
ats := c.Request.Header.Get("Authorization")
if ats != "" {
aths, err := url.PathUnescape(ats)
if err == nil && strings.HasPrefix(aths, "TOKEN ") {
tks = strings.Replace(aths, "TOKEN ", "", 1)
}
}
if tks == "" {
tkc, err := c.Request.Cookie(cookieName)
if err == nil {
tks = tkc.Value
}
}
if tks != "" {
tk, err := jwt.Parse(tks, secret)
if err == nil {
claim, ok := tk.Claims.(jwt.MapClaims)
if ok {
return claim
}
}
}
return nil
}
func MidAccessAllow(c *gin.Context) {
c.Header("Access-Control-Allow-Origin", c.Request.Header.Get("Origin"))
c.Header("Access-Control-Allow-Methods", "*")
c.Header("Access-Control-Allow-Headers", "DNT,X-Mx-ReqToken,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Authorization")
c.Header("Access-Control-Allow-Credentials", "true")
if c.Request.Method == "OPTIONS" {
c.String(200, "request ok!")
c.Abort()
}
}
func GinHandler(fn interface{}) func(c *gin.Context) {
return func(c *gin.Context) {
tf := reflect.TypeOf(fn)
vf := reflect.ValueOf(fn)
if tf.Kind() != reflect.Func {
c.String(500, "func err")
return
}
if tf.NumIn() <= 0 {
vf.Call(nil)
return
}
if tf.NumIn() <= 1 {
vf.Call([]reflect.Value{reflect.ValueOf(c)})
return
}
reqtf := tf.In(1)
reqtf1 := reqtf
if reqtf.Kind() == reflect.Ptr {
reqtf1 = reqtf.Elem()
}
reqvf := reflect.Zero(reqtf)
mp := ruisUtil.NewMap()
if err := c.BindJSON(mp.PMap()); err != nil {
c.String(500, "param err:"+err.Error())
return
}
if reqtf.AssignableTo(reflect.TypeOf(mp)) {
reqvf = reflect.ValueOf(mp)
} else if reqtf1.Kind() == reflect.Struct {
reqvf = reflect.New(reqtf1)
err := Maps2Struct(mp, reqvf.Interface())
if err != nil {
c.String(500, "param err")
return
}
}
vf.Call([]reflect.Value{reflect.ValueOf(c), reqvf})
}
}

View File

@ -1,74 +0,0 @@
package core
import (
"errors"
"reflect"
"github.com/go-xorm/xorm"
)
type Page struct {
Page int64
Size int64
Pages int64
Total int64
Data interface{}
}
func XormFindCount(ses *xorm.Session, rowsSlicePtr interface{}) (int64, error) {
sliceValue := reflect.Indirect(reflect.ValueOf(rowsSlicePtr))
if sliceValue.Kind() != reflect.Slice && sliceValue.Kind() != reflect.Map {
return 0, errors.New("needs a pointer to a slice or a map")
}
sliceElementType := sliceValue.Type().Elem()
if sliceElementType.Kind() == reflect.Ptr {
if sliceElementType.Elem().Kind() == reflect.Struct {
pv := reflect.New(sliceElementType.Elem())
return ses.Clone().Count(pv.Interface())
}
} else if sliceElementType.Kind() == reflect.Struct {
pv := reflect.New(sliceElementType)
return ses.Clone().Count(pv.Interface())
}
return 0, errors.New("not found table")
}
func XormFindPage(ses *xorm.Session, ls interface{}, page int64, size ...int64) (*Page, error) {
count, err := XormFindCount(ses, ls)
if err != nil {
return nil, err
}
return XormFindPages(ses, ls, count, page, size...)
}
func XormFindPages(ses *xorm.Session, ls interface{}, count, page int64, size ...int64) (*Page, error) {
var pageno int64 = 1
var sizeno int64 = 10
var pagesno int64 = 0
//var count=c.FindCount(pars)
if page > 0 {
pageno = page
}
if len(size) > 0 && size[0] > 0 {
sizeno = size[0]
}
start := (pageno - 1) * sizeno
err := ses.Limit(int(sizeno), int(start)).Find(ls)
if err != nil {
return nil, err
}
pagest := count / sizeno
if count%sizeno > 0 {
pagesno = pagest + 1
} else {
pagesno = pagest
}
return &Page{
Page: pageno,
Pages: pagesno,
Size: sizeno,
Total: count,
Data: ls,
}, nil
}

View File

@ -1,23 +0,0 @@
package core
import (
"math/rand"
"time"
)
var (
JwtKey = ""
JwtCookiePath = "/"
)
// RandomString generates a random string of length l
func RandomString(l int) string {
str := "0123456789AaBbCcDdEeFfGgHhIiJjKkLlMmNnOoPpQqRrSsTtUuVvWwXxYyZz"
bts := []byte(str)
var result []byte
r := rand.New(rand.NewSource(time.Now().UnixNano()))
for i := 0; i < l; i++ {
result = append(result, bts[r.Intn(len(bts))])
}
return string(result)
}

Binary file not shown.

View File

@ -1,203 +0,0 @@
/*
Navicat Premium Data Transfer
Source Server : gokinsdb
Source Server Type : SQLite
Source Server Version : 3030001
Source Schema : main
Target Server Type : SQLite
Target Server Version : 3030001
File Encoding : 65001
Date: 30/10/2020 11:08:09
*/
PRAGMA foreign_keys = false;
-- ----------------------------
-- Table structure for sys_param
-- ----------------------------
DROP TABLE IF EXISTS "sys_param";
CREATE TABLE "sys_param" (
"id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,
"key" varchar,
"cont" blob,
"times" datetime
);
-- ----------------------------
-- Table structure for sys_user
-- ----------------------------
DROP TABLE IF EXISTS "sys_user";
CREATE TABLE "sys_user" (
"id" integer,
"xid" text NOT NULL,
"name" text NOT NULL,
"pass" text,
"nick" text,
"phone" text,
"times" datetime,
"logintm" datetime,
"fwtm" datetime,
"avat" text,
PRIMARY KEY ("id")
);
-- ----------------------------
-- Table structure for t_model
-- ----------------------------
DROP TABLE IF EXISTS "t_model";
CREATE TABLE "t_model" (
"id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,
"uid" varchar,
"title" text,
"desc" text,
"times" datetime,
"del" integer DEFAULT 0,
"envs" text,
"wrkdir" text,
"clrdir" integer DEFAULT 0
);
-- ----------------------------
-- Table structure for t_model_run
-- ----------------------------
DROP TABLE IF EXISTS "t_model_run";
CREATE TABLE "t_model_run" (
"id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,
"uid" varchar,
"tid" integer,
"times" datetime,
"timesd" datetime,
"state" integer,
"errs" text,
"tgid" integer,
"tgtyps" text
);
-- ----------------------------
-- Table structure for t_output
-- ----------------------------
DROP TABLE IF EXISTS "t_output";
CREATE TABLE "t_output" (
"id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,
"type" varchar(50),
"tid" integer,
"output" text,
"times" datetime
);
-- ----------------------------
-- Table structure for t_plugin
-- ----------------------------
DROP TABLE IF EXISTS "t_plugin";
CREATE TABLE "t_plugin" (
"id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,
"tid" integer NOT NULL,
"title" text,
"type" integer DEFAULT 0,
"para" text,
"cont" text,
"times" datetime,
"sort" integer DEFAULT 100,
"del" integer DEFAULT 0,
"exend" integer DEFAULT 0
);
-- ----------------------------
-- Table structure for t_plugin_run
-- ----------------------------
DROP TABLE IF EXISTS "t_plugin_run";
CREATE TABLE "t_plugin_run" (
"id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,
"pid" integer,
"mid" integer,
"tid" integer,
"times" datetime,
"timesd" datetime,
"state" integer,
"excode" integer
);
-- ----------------------------
-- Table structure for t_trigger
-- ----------------------------
DROP TABLE IF EXISTS "t_trigger";
CREATE TABLE "t_trigger" (
"id" integer NOT NULL,
"uid" varchar,
"types" varchar,
"title" varchar,
"desc" text,
"times" date,
"config" text,
"del" integer DEFAULT 0,
"enable" integer DEFAULT 0,
"errs" text,
"mid" integer,
"meid" integer,
"opt1" text,
"opt2" text,
"opt3" text,
PRIMARY KEY ("id")
);
-- ----------------------------
-- Auto increment value for sys_param
-- ----------------------------
-- ----------------------------
-- Indexes structure for table sys_param
-- ----------------------------
CREATE INDEX "main"."key"
ON "sys_param" (
"key" ASC
);
-- ----------------------------
-- Indexes structure for table sys_user
-- ----------------------------
CREATE INDEX "main"."IDX_sys_user_phone"
ON "sys_user" (
"phone" ASC
);
CREATE INDEX "main"."name"
ON "sys_user" (
"name" ASC
);
CREATE INDEX "main"."xid"
ON "sys_user" (
"xid" ASC
);
-- ----------------------------
-- Auto increment value for t_model
-- ----------------------------
-- ----------------------------
-- Auto increment value for t_model_run
-- ----------------------------
-- ----------------------------
-- Auto increment value for t_output
-- ----------------------------
-- ----------------------------
-- Indexes structure for table t_output
-- ----------------------------
CREATE INDEX "main"."kv"
ON "t_output" (
"type" ASC,
"tid" ASC
);
-- ----------------------------
-- Auto increment value for t_plugin
-- ----------------------------
-- ----------------------------
-- Auto increment value for t_plugin_run
-- ----------------------------
PRAGMA foreign_keys = true;

View File

@ -1,19 +0,0 @@
FROM golang:1.15.2-alpine AS builder
RUN apk add git gcc libc-dev && git clone https://github.com/mgr9525/gokins.git /build
WORKDIR /build
RUN GOOS=linux GOARCH=amd64 go build -o bin/gokins main.go
FROM alpine:latest AS final
RUN apk update \
&& apk upgrade \
&& apk --no-cache add openssl \
&& apk --no-cache add ca-certificates \
&& rm -rf /var/cache/apk \
&& mkdir -p /app
COPY --from=builder /build/bin/gokins /app
WORKDIR /app
ENTRYPOINT ["/app/gokins"]

173
engine/buildDao.go Normal file
View File

@ -0,0 +1,173 @@
package engine
import (
"github.com/gokins-main/core/common"
"github.com/gokins-main/core/runtime"
"github.com/gokins-main/gokins/comm"
"github.com/gokins-main/gokins/model"
"github.com/sirupsen/logrus"
"runtime/debug"
"time"
)
func (c *BuildTask) updateBuild(build *runtime.Build) {
defer func() {
if err := recover(); err != nil {
logrus.Warnf("BuildTask updateBuild recover:%v", err)
logrus.Warnf("BuildTask stack:%s", string(debug.Stack()))
}
}()
e := &model.TBuild{
Status: build.Status,
Error: build.Error,
Event: build.Event,
Started: build.Started,
Finished: build.Finished,
Updated: time.Now(),
}
_, err := comm.Db.Cols("status", "event", "error", "started", "finished", "updated").
Where("id=?", build.Id).Update(e)
if err != nil {
logrus.Errorf("BuildTask.updateBuild db err:%v", err)
}
if !common.BuildStatusEnded(e.Status) {
return
}
stge := &model.TStage{
Status: common.BuildStatusCancel,
Finished: time.Now(),
Updated: time.Now(),
}
_, err = comm.Db.Cols("status", "finished", "updated").
Where("build_id=? and `status`!=? and `status`!=? and `status`!=?",
build.Id, common.BuildStatusOk, common.BuildStatusError, common.BuildStatusCancel).Update(stge)
if err != nil {
logrus.Errorf("BuildTask.updateBuild stage err:%v", err)
}
stpe := &model.TStep{
Status: common.BuildStatusCancel,
Finished: time.Now(),
Updated: time.Now(),
}
_, err = comm.Db.Cols("status", "finished", "updated").
Where("build_id=? and `status`!=? and `status`!=? and `status`!=?",
build.Id, common.BuildStatusOk, common.BuildStatusError, common.BuildStatusCancel).Update(stpe)
if err != nil {
logrus.Errorf("BuildTask.updateBuild step err:%v", err)
}
cmde := &model.TCmdLine{
Status: common.BuildStatusCancel,
Finished: time.Now(),
}
_, err = comm.Db.Cols("status", "finished").
Where("build_id=? and `status`!=? and `status`!=? and `status`!=?",
build.Id, common.BuildStatusOk, common.BuildStatusError, common.BuildStatusCancel).Update(cmde)
if err != nil {
logrus.Errorf("BuildTask.updateStage step err:%v", err)
}
}
func (c *BuildTask) updateStage(stage *runtime.Stage) {
defer func() {
if err := recover(); err != nil {
logrus.Warnf("BuildTask updateBuild recover:%v", err)
logrus.Warnf("BuildTask stack:%s", string(debug.Stack()))
}
}()
e := &model.TStage{
Status: stage.Status,
Error: stage.Error,
Started: stage.Started,
Finished: stage.Finished,
Updated: time.Now(),
}
_, err := comm.Db.Cols("status", "error", "started", "finished", "updated").
Where("id=?", stage.Id).Update(e)
if err != nil {
logrus.Errorf("BuildTask.updateStage db err:%v", err)
}
if !common.BuildStatusEnded(e.Status) {
return
}
stpe := &model.TStep{
Status: common.BuildStatusCancel,
Finished: time.Now(),
Updated: time.Now(),
}
_, err = comm.Db.Cols("status", "finished", "updated").
Where("stage_id=? and `status`!=? and `status`!=? and `status`!=?",
stage.Id, common.BuildStatusOk, common.BuildStatusError, common.BuildStatusCancel).Update(stpe)
if err != nil {
logrus.Errorf("BuildTask.updateStage step err:%v", err)
}
}
func (c *BuildTask) updateStep(job *jobSync) {
defer func() {
if err := recover(); err != nil {
logrus.Warnf("BuildTask updateBuild recover:%v", err)
logrus.Warnf("BuildTask stack:%s", string(debug.Stack()))
}
}()
job.RLock()
defer job.RUnlock()
e := &model.TStep{
Status: job.step.Status,
Event: job.step.Event,
Error: job.step.Error,
ExitCode: job.step.ExitCode,
Started: job.step.Started,
Finished: job.step.Finished,
Updated: time.Now(),
}
_, err := comm.Db.Cols("status", "event", "error", "exit_code", "started", "finished", "updated").
Where("id=?", job.step.Id).Update(e)
if err != nil {
logrus.Errorf("BuildTask.updateStep db err:%v", err)
}
if !common.BuildStatusEnded(e.Status) {
return
}
cmde := &model.TCmdLine{
Status: common.BuildStatusCancel,
Finished: time.Now(),
}
_, err = comm.Db.Cols("status", "finished").
Where("step_id=? and `status`!=? and `status`!=? and `status`!=?",
job.step.Id, common.BuildStatusOk, common.BuildStatusError, common.BuildStatusCancel).Update(cmde)
if err != nil {
logrus.Errorf("BuildTask.updateStage step err:%v", err)
}
}
func (c *BuildTask) updateStepCmd(cmd *cmdSync) {
defer func() {
if err := recover(); err != nil {
logrus.Warnf("BuildTask updateBuild recover:%v", err)
logrus.Warnf("BuildTask stack:%s", string(debug.Stack()))
}
}()
cmd.RLock()
defer cmd.RUnlock()
cmde := &model.TCmdLine{
Status: cmd.status,
Code: cmd.code,
}
cols := []string{"status"}
switch cmd.status {
case common.BuildStatusRunning:
cmde.Started = cmd.started
cols = append(cols, "started")
default:
cmde.Finished = cmd.finished
cols = append(cols, "finished")
}
_, err := comm.Db.Cols(cols...).Where("id=?", cmd.cmd.Id).Update(cmde)
if err != nil {
logrus.Errorf("BuildTask.updateStep db err:%v", err)
}
}

123
engine/buildEgn.go Normal file
View File

@ -0,0 +1,123 @@
package engine
import (
"container/list"
"github.com/gokins-main/core/common"
"github.com/gokins-main/core/runtime"
"github.com/gokins-main/gokins/comm"
hbtp "github.com/mgr9525/HyperByte-Transfer-Protocol"
"github.com/sirupsen/logrus"
"runtime/debug"
"sync"
"time"
)
type BuildEngine struct {
tskwlk sync.RWMutex
taskw *list.List
tskslk sync.RWMutex
tasks map[string]*BuildTask
}
func StartBuildEngine() *BuildEngine {
if comm.Cfg.Server.RunLimit < 2 {
comm.Cfg.Server.RunLimit = 5
}
c := &BuildEngine{
taskw: list.New(),
tasks: make(map[string]*BuildTask),
}
go func() {
c.init()
for !hbtp.EndContext(comm.Ctx) {
c.run()
time.Sleep(time.Second)
}
}()
return c
}
func (c *BuildEngine) Stop() {
c.tskslk.RLock()
defer c.tskslk.RUnlock()
for _, v := range c.tasks {
v.stop()
}
}
func (c *BuildEngine) init() {
/*// TODO: do not execute while debugging
if comm.Debugs {
return
}*/
cont := "server restart"
comm.Db.Exec(
"update `t_build` set `status`=?,`error`=? where `status`!=? and `status`!=? and `status`!=?",
common.BuildStatusCancel, cont, common.BuildStatusOk, common.BuildStatusError, common.BuildStatusCancel,
)
comm.Db.Exec(
"update `t_stage` set `status`=?,`error`=? where `status`!=? and `status`!=? and `status`!=?",
common.BuildStatusCancel, cont, common.BuildStatusOk, common.BuildStatusError, common.BuildStatusCancel,
)
comm.Db.Exec(
"update `t_step` set `status`=?,`error`=? where `status`!=? and `status`!=? and `status`!=?",
common.BuildStatusCancel, cont, common.BuildStatusOk, common.BuildStatusError, common.BuildStatusCancel,
)
comm.Db.Exec(
"update `t_cmd_line` set `status`=? where `status`!=? and `status`!=? and `status`!=?",
common.BuildStatusCancel, common.BuildStatusOk, common.BuildStatusError, common.BuildStatusCancel,
)
}
func (c *BuildEngine) run() {
defer func() {
if err := recover(); err != nil {
logrus.Warnf("BuildEngine run recover:%v", err)
logrus.Warnf("BuildEngine stack:%s", string(debug.Stack()))
}
}()
c.tskwlk.RLock()
ln1 := c.taskw.Len()
c.tskwlk.RUnlock()
c.tskslk.RLock()
ln2 := len(c.tasks)
c.tskslk.RUnlock()
if ln1 > 0 && ln2 < comm.Cfg.Server.RunLimit {
c.tskwlk.RLock()
e := c.taskw.Front()
c.tskwlk.RUnlock()
if e == nil {
return
}
c.tskwlk.Lock()
c.taskw.Remove(e)
c.tskwlk.Unlock()
v := NewBuildTask(c, e.Value.(*runtime.Build))
c.tskslk.Lock()
c.tasks[v.build.Id] = v
c.tskslk.Unlock()
go c.startBuild(v)
}
}
func (c *BuildEngine) startBuild(v *BuildTask) {
v.run()
c.tskslk.Lock()
defer c.tskslk.Unlock()
delete(c.tasks, v.build.Id)
}
func (c *BuildEngine) Put(bd *runtime.Build) {
c.tskwlk.Lock()
defer c.tskwlk.Unlock()
c.taskw.PushBack(bd)
}
func (c *BuildEngine) Get(buildid string) (*BuildTask, bool) {
if buildid == "" {
return nil, false
}
c.tskslk.Lock()
defer c.tskslk.Unlock()
v, ok := c.tasks[buildid]
return v, ok
}
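A sketch of how the engine above is expected to be driven; the surrounding setup (where the *runtime.Build is assembled and where StartBuildEngine is called) is not part of this file, so the wiring below is an assumption for illustration only.

package example

import (
	"github.com/gokins-main/core/runtime"
	"github.com/gokins-main/gokins/engine"
)

func enqueue(egn *engine.BuildEngine, bd *runtime.Build) {
	// Put only appends to the wait list; the engine's run loop starts the
	// task once len(tasks) drops below comm.Cfg.Server.RunLimit.
	egn.Put(bd)
	if task, ok := egn.Get(bd.Id); ok {
		// Show returns a point-in-time snapshot of the build/stage/step status tree.
		if show, running := task.Show(); running {
			_ = show
		}
	}
}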

521
engine/buildTask.go Normal file
View File

@ -0,0 +1,521 @@
package engine
import (
"context"
"fmt"
"os"
"path/filepath"
"regexp"
"runtime/debug"
"strconv"
"sync"
"time"
"github.com/go-git/go-git/v5"
ghttp "github.com/go-git/go-git/v5/plumbing/transport/http"
"github.com/gokins-main/core/common"
"github.com/gokins-main/core/runtime"
"github.com/gokins-main/gokins/comm"
"github.com/gokins-main/gokins/util"
hbtp "github.com/mgr9525/HyperByte-Transfer-Protocol"
"github.com/sirupsen/logrus"
)
type taskStage struct {
sync.RWMutex
wg sync.WaitGroup
stage *runtime.Stage
jobs map[string]*jobSync
}
func (c *taskStage) status(stat, errs string, event ...string) {
c.Lock()
defer c.Unlock()
c.stage.Status = stat
c.stage.Error = errs
if len(event) > 0 {
c.stage.Event = event[0]
}
}
type BuildTask struct {
egn *BuildEngine
ctx context.Context
cncl context.CancelFunc
bdlk sync.RWMutex
build *runtime.Build
bngtm time.Time
endtm time.Time
ctrlendtm time.Time
staglk sync.RWMutex
stages map[string]*taskStage // key:name
joblk sync.RWMutex
jobs map[string]*jobSync //key:id
buildPath string
repoPaths string //fs
workpgss int
isClone bool
repoPath string
}
func (c *BuildTask) status(stat, errs string, event ...string) {
c.bdlk.Lock()
defer c.bdlk.Unlock()
c.build.Status = stat
c.build.Error = errs
if len(event) > 0 {
c.build.Event = event[0]
}
}
func NewBuildTask(egn *BuildEngine, bd *runtime.Build) *BuildTask {
c := &BuildTask{egn: egn, build: bd}
return c
}
func (c *BuildTask) stopd() bool {
if c.ctx == nil {
return true
}
return hbtp.EndContext(c.ctx)
}
func (c *BuildTask) stop() {
c.ctrlendtm = time.Time{}
if c.cncl != nil {
c.cncl()
}
}
func (c *BuildTask) Cancel() {
c.ctrlendtm = time.Now()
if c.cncl != nil {
c.cncl()
}
}
func (c *BuildTask) clears() {
defer func() {
if err := recover(); err != nil {
logrus.Warnf("BuildTask clears recover:%v", err)
logrus.Warnf("BuildTask stack:%s", string(debug.Stack()))
}
}()
if c.isClone {
os.RemoveAll(c.repoPaths)
}
for _, v := range c.jobs {
pth := filepath.Join(c.buildPath, common.PathJobs, v.step.Id, common.PathArts)
os.RemoveAll(pth)
}
}
func (c *BuildTask) run() {
defer func() {
if err := recover(); err != nil {
logrus.Warnf("BuildTask run recover:%v", err)
logrus.Warnf("BuildTask stack:%s", string(debug.Stack()))
}
}()
defer func() {
c.endtm = time.Now()
c.build.Finished = time.Now()
c.updateBuild(c.build)
c.clears()
}()
c.buildPath = filepath.Join(comm.WorkPath, common.PathBuild, c.build.Id)
c.repoPaths = filepath.Join(c.buildPath, common.PathRepo)
err := os.MkdirAll(c.buildPath, 0750)
if err != nil {
c.status(common.BuildStatusError, "build path err:"+err.Error(), common.BuildEventPath)
return
}
c.bngtm = time.Now()
c.stages = make(map[string]*taskStage)
c.jobs = make(map[string]*jobSync)
c.build.Started = time.Now()
c.build.Status = common.BuildStatusPending
if !c.check() {
c.build.Status = common.BuildStatusError
return
}
c.ctx, c.cncl = context.WithTimeout(comm.Ctx, time.Hour*2+time.Minute*5)
c.build.Status = common.BuildStatusPreparation
err = c.getRepo()
if err != nil {
logrus.Errorf("clone repo err:%v", err)
c.status(common.BuildStatusError, "repo err:"+err.Error(), common.BuildEventGetRepo)
return
}
c.build.Status = common.BuildStatusRunning
for _, v := range c.build.Stages {
v.Status = common.BuildStatusPending
for _, e := range v.Steps {
e.Status = common.BuildStatusPending
}
}
c.updateBuild(c.build)
logrus.Debugf("BuildTask run build:%s,pgss:%d", c.build.Id, c.workpgss)
c.workpgss = 100
for _, v := range c.build.Stages {
c.runStage(v)
if v.Status != common.BuildStatusOk {
c.build.Status = v.Status
return
}
}
c.build.Status = common.BuildStatusOk
}
func (c *BuildTask) runStage(stage *runtime.Stage) {
defer func() {
stage.Finished = time.Now()
c.updateStage(stage)
logrus.Debugf("stage %s end!!!", stage.Name)
if err := recover(); err != nil {
logrus.Warnf("BuildTask runStage recover:%v", err)
logrus.Warnf("BuildTask stack:%s", string(debug.Stack()))
}
}()
stage.Started = time.Now()
stage.Status = common.BuildStatusRunning
//c.logfile.WriteString(fmt.Sprintf("\n****************Stage+ %s\n", stage.Name))
c.updateStage(stage)
c.staglk.RLock()
stg, ok := c.stages[stage.Name]
c.staglk.RUnlock()
if !ok {
stage.Status = common.BuildStatusError
stage.Error = fmt.Sprintf("not found stage?:%s", stage.Name)
return
}
c.staglk.RLock()
for _, v := range stage.Steps {
stg.RLock()
jb, ok := stg.jobs[v.Name]
stg.RUnlock()
if !ok {
stg.status(common.BuildStatusError, fmt.Sprintf("not found step:%s", v.Name))
break
}
stg.wg.Add(1)
go c.runStep(stg, jb)
}
c.staglk.RUnlock()
stg.wg.Wait()
for _, v := range stg.jobs {
v.RLock()
ign := v.step.ErrIgnore
status := v.step.Status
errs := v.step.Error
v.RUnlock()
if !ign && status == common.BuildStatusError {
stg.status(status, errs)
return
} else if status == common.BuildStatusCancel {
stg.status(status, errs)
return
}
}
stage.Status = common.BuildStatusOk
}
func (c *BuildTask) runStep(stage *taskStage, job *jobSync) {
defer stage.wg.Done()
defer func() {
job.ended = true
job.step.Finished = time.Now()
go c.updateStep(job)
if err := recover(); err != nil {
logrus.Warnf("BuildTask runStep recover:%v", err)
logrus.Warnf("BuildTask stack:%s", string(debug.Stack()))
}
}()
if len(job.runjb.Commands) <= 0 {
job.status(common.BuildStatusError, "command format empty", common.BuildEventJobCmds)
return
}
job.RLock()
dendons := job.step.Waits
job.RUnlock()
if len(dendons) > 0 {
ls := make([]*jobSync, 0)
for _, v := range dendons {
if v == "" {
continue
}
stage.RLock()
e, ok := stage.jobs[v]
stage.RUnlock()
//core.Log.Debugf("job(%s) depend %s(ok:%t)",job.step.Name,v,ok)
if !ok {
job.status(common.BuildStatusError, fmt.Sprintf("wait on %s not found", v))
return
}
if e.step.Name == job.step.Name {
job.status(common.BuildStatusError, fmt.Sprintf("wait on %s is your self", job.step.Name))
return
}
ls = append(ls, e)
}
for !hbtp.EndContext(comm.Ctx) {
time.Sleep(time.Millisecond * 100)
if c.stopd() {
job.status(common.BuildStatusCancel, "")
return
}
waitln := len(ls)
for _, v := range ls {
v.Lock()
vStats := v.step.Status
v.Unlock()
if vStats == common.BuildStatusOk {
waitln--
} else if vStats == common.BuildStatusCancel {
job.status(common.BuildStatusCancel, "")
return
} else if vStats == common.BuildStatusError {
if v.step.ErrIgnore {
waitln--
} else {
job.status(common.BuildStatusError, fmt.Sprintf("wait on %s is err", v.step.Name))
return
}
}
}
if waitln <= 0 {
break
}
}
}
job.Lock()
job.ended = false
job.step.Status = common.BuildStatusPreparation
job.step.Started = time.Now()
job.Unlock()
go c.updateStep(job)
err := Mgr.jobEgn.Put(job)
if err != nil {
job.status(common.BuildStatusError, fmt.Sprintf("command run err:%v", err))
return
}
logrus.Debugf("BuildTask put step:%s", job.step.Name)
for !hbtp.EndContext(comm.Ctx) {
job.Lock()
stats := job.step.Status
job.Unlock()
if common.BuildStatusEnded(stats) {
break
}
if c.stopd() && time.Since(c.ctrlendtm).Seconds() > 3 {
job.status(common.BuildStatusCancel, "cancel")
break
}
/*if c.ctrlend && time.Since(c.ctrlendtm).Seconds() > 3 {
job.status(common.BuildStatusError, "cancel")
break
}*/
time.Sleep(time.Millisecond * 10)
}
/*job.Lock()
defer job.Unlock()
if c.ctrlend && job.step.Status == common.BuildStatusError {
job.step.Status = common.BuildStatusCancel
}*/
}
func (c *BuildTask) getRepo() error {
if !c.isClone {
return nil
}
os.MkdirAll(c.repoPaths, 0750)
err := c.gitClone(c.ctx, c.repoPaths, c.build.Repo)
if err != nil {
return err
}
return nil
}
var regBfb = regexp.MustCompile(`:\s+(\d+)% \(\d+\/\d+\)`)
func (c *BuildTask) Write(bts []byte) (n int, err error) {
defer func() {
if err := recover(); err != nil {
logrus.Warnf("BuildTask gitWrite recover:%v", err)
logrus.Warnf("BuildTask stack:%s", string(debug.Stack()))
}
}()
ln := len(bts)
line := string(bts)
if ln > 0 && regBfb.MatchString(line) {
subs := regBfb.FindAllStringSubmatch(line, -1)[0]
if len(subs) > 1 {
p, err := strconv.Atoi(subs[1])
if err == nil {
c.workpgss = int(float64(p) * 0.8)
}
}
}
println("BuildTask git log:", line)
return ln, nil
}
func (c *BuildTask) gitClone(ctx context.Context, dir string, repo *runtime.Repository) error {
clonePath := filepath.Join(dir)
gc := &git.CloneOptions{
URL: repo.CloneURL,
Progress: c,
}
if repo.Name != "" {
gc.Auth = &ghttp.BasicAuth{
Username: repo.Name,
Password: repo.Token,
}
}
logrus.Debugf("gitClone : clone url: %s sha: %s", repo.CloneURL, repo.Sha)
repository, err := util.CloneRepo(clonePath, gc, ctx)
if err != nil {
return err
}
if repo.Sha != "" {
err = util.CheckOutHash(repository, repo.Sha)
if err != nil {
return err
}
}
return nil
}
func (c *BuildTask) UpJob(job *jobSync, stat, errs string, code int) {
if job == nil || stat == "" {
return
}
job.Lock()
job.step.Status = stat
job.step.Error = errs
job.step.ExitCode = code
job.Unlock()
go c.updateStep(job)
}
func (c *BuildTask) UpJobCmd(cmd *cmdSync, fs, code int) {
if cmd == nil {
return
}
cmd.Lock()
defer cmd.Unlock()
switch fs {
case 1:
cmd.status = common.BuildStatusRunning
cmd.started = time.Now()
case 2:
cmd.status = common.BuildStatusOk
if code != 0 {
cmd.code = code
cmd.status = common.BuildStatusError
}
cmd.finished = time.Now()
case 3:
cmd.code = code
cmd.status = common.BuildStatusCancel
cmd.finished = time.Now()
case -1:
cmd.code = code
cmd.status = common.BuildStatusError
cmd.finished = time.Now()
default:
return
}
go c.updateStepCmd(cmd)
}
func (c *BuildTask) WorkProgress() int {
return c.workpgss
}
func (c *BuildTask) Show() (*runtime.BuildShow, bool) {
if c.stopd() {
return nil, false
}
c.bdlk.RLock()
rtbd := &runtime.BuildShow{
Id: c.build.Id,
PipelineId: c.build.PipelineId,
Status: c.build.Status,
Error: c.build.Error,
Event: c.build.Event,
Started: c.build.Started,
Finished: c.build.Finished,
Created: c.build.Created,
Updated: c.build.Updated,
}
c.bdlk.RUnlock()
for _, v := range c.build.Stages {
c.staglk.RLock()
stg, ok := c.stages[v.Name]
c.staglk.RUnlock()
if !ok {
continue
}
stg.RLock()
rtstg := &runtime.StageShow{
Id: stg.stage.Id,
BuildId: stg.stage.BuildId,
Status: stg.stage.Status,
Event: stg.stage.Event,
Error: stg.stage.Error,
Started: stg.stage.Started,
Stopped: stg.stage.Stopped,
Finished: stg.stage.Finished,
Created: stg.stage.Created,
Updated: stg.stage.Updated,
}
stg.RUnlock()
rtbd.Stages = append(rtbd.Stages, rtstg)
for _, st := range v.Steps {
c.staglk.RLock()
job, ok := stg.jobs[st.Name]
c.staglk.RUnlock()
if !ok {
continue
}
job.RLock()
rtstp := &runtime.StepShow{
Id: job.step.Id,
StageId: job.step.StageId,
BuildId: job.step.BuildId,
Status: job.step.Status,
Event: job.step.Event,
Error: job.step.Error,
ExitCode: job.step.ExitCode,
Started: job.step.Started,
Stopped: job.step.Stopped,
Finished: job.step.Finished,
}
rtstg.Steps = append(rtstg.Steps, rtstp)
for _, cmd := range job.cmdmp {
rtstp.Cmds = append(rtstp.Cmds, &runtime.CmdShow{
Id: cmd.cmd.Id,
Status: cmd.status,
Started: cmd.started,
Finished: cmd.finished,
})
}
job.RUnlock()
}
}
return rtbd, true
}
func (c *BuildTask) GetJob(id string) (*jobSync, bool) {
if id == "" {
return nil, false
}
c.joblk.RLock()
defer c.joblk.RUnlock()
job, ok := c.jobs[id]
return job, ok
}

276
engine/buildTaskc.go Normal file
View File

@ -0,0 +1,276 @@
package engine
import (
"errors"
"fmt"
"github.com/gokins-main/core/common"
"github.com/gokins-main/core/runtime"
"github.com/gokins-main/core/utils"
"github.com/gokins-main/gokins/comm"
"github.com/gokins-main/gokins/model"
"github.com/gokins-main/runner/runners"
"github.com/sirupsen/logrus"
"os"
"runtime/debug"
"strings"
"time"
)
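// check validates the build definition before it runs: the repository URL
// (an existing local directory skips the clone), non-empty and unique
// stage/step names, and matching build/stage ids. It also pre-builds the
// stage and job indexes used by the rest of the task.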
func (c *BuildTask) check() bool {
if c.build.Repo == nil {
c.status(common.BuildEventCheckParam, "repo param err")
return false
}
if c.build.Repo.CloneURL == "" {
c.status(common.BuildEventCheckParam, "repo param err:clone url")
return false
}
c.repoPath = c.build.Repo.CloneURL
s, err := os.Stat(c.repoPath)
if err == nil && s.IsDir() {
c.isClone = false
c.repoPaths = c.repoPath
} else {
if !common.RegUrl.MatchString(c.build.Repo.CloneURL) {
c.status(common.BuildEventCheckParam, "repo param err:clone url")
return false
}
c.isClone = true
}
if c.build.Stages == nil || len(c.build.Stages) <= 0 {
c.build.Event = common.BuildEventCheckParam
c.build.Error = "build Stages is empty"
return false
}
stages := make(map[string]*taskStage)
for _, v := range c.build.Stages {
if v.BuildId != c.build.Id {
c.build.Event = common.BuildEventCheckParam
c.build.Error = fmt.Sprintf("Stage Build id err:%s/%s", v.BuildId, c.build.Id)
return false
}
if v.Name == "" {
c.build.Event = common.BuildEventCheckParam
c.build.Error = "build Stage name is empty"
return false
}
if v.Steps == nil || len(v.Steps) <= 0 {
c.build.Event = common.BuildEventCheckParam
c.build.Error = "build Stages is empty"
return false
}
if _, ok := stages[v.Name]; ok {
c.build.Event = common.BuildEventCheckParam
c.build.Error = fmt.Sprintf("build Stages.%s is repeat", v.Name)
return false
}
vs := &taskStage{
stage: v,
jobs: make(map[string]*jobSync),
}
stages[v.Name] = vs
for _, e := range v.Steps {
if e.BuildId != c.build.Id {
c.build.Event = common.BuildEventCheckParam
c.build.Error = fmt.Sprintf("Job Build id err:%s/%s", v.BuildId, c.build.Id)
return false
}
if e.StageId != v.Id {
c.build.Event = common.BuildEventCheckParam
c.build.Error = fmt.Sprintf("Job Stage id err:%s/%s", v.BuildId, c.build.Id)
return false
}
e.Step = strings.TrimSpace(e.Step)
if e.Step == "" {
c.build.Event = common.BuildEventCheckParam
c.build.Error = "build Step Plugin is empty"
return false
}
if e.Name == "" {
c.build.Event = common.BuildEventCheckParam
c.build.Error = "build Step name is empty"
return false
}
if _, ok := vs.jobs[e.Name]; ok {
c.build.Event = common.BuildEventCheckParam
c.build.Error = fmt.Sprintf("build Job.%s is repeat", e.Name)
return false
}
job := &jobSync{
task: c,
step: e,
cmdmp: make(map[string]*cmdSync),
}
err = c.genRunjob(v, job)
if err != nil {
c.build.Event = common.BuildEventCheckParam
c.build.Error = fmt.Sprintf("build Job.%s Commands err", e.Name)
return false
}
vs.Lock()
vs.jobs[e.Name] = job
vs.Unlock()
c.joblk.Lock()
c.jobs[e.Id] = job
c.joblk.Unlock()
}
}
/*for _,v:=range stages{
for _,e:=range v.jobs{
err:=Mgr.jobEgn.Put(e)
if err!=nil{
c.build.Event = common.BuildEventPutJob
c.build.Error=err.Error()
return false
}
}
}*/
for k, v := range stages {
c.stages[k] = v
}
return true
}
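// genRunjob turns a step into a runner job: its commands are flattened into
// ordered CmdContent entries, placeholders matched by common.RegVar are
// replaced from the build variables (values flagged as secret are masked as
// "***" in the stored command text), and every command line is persisted to
// t_cmd_line.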
func (c *BuildTask) genRunjob(stage *runtime.Stage, job *jobSync) (rterr error) {
defer func() {
if err := recover(); err != nil {
rterr = fmt.Errorf("recover:%v", err)
logrus.Warnf("BuildTask genRunjob recover:%v", err)
logrus.Warnf("BuildTask stack:%s", string(debug.Stack()))
}
}()
runjb := &runners.RunJob{
Id: job.step.Id,
StageId: job.step.StageId,
BuildId: job.step.BuildId,
StageName: stage.Name,
Step: job.step.Step,
Name: job.step.Name,
Input: job.step.Input,
Env: job.step.Env,
Artifacts: job.step.Artifacts,
UseArtifacts: job.step.UseArtifacts,
}
var err error
switch job.step.Commands.(type) {
case string:
c.appendcmds(runjb, job.step.Commands.(string))
case []interface{}:
err = c.gencmds(runjb, job.step.Commands.([]interface{}))
case []string:
var ls []interface{}
ts := job.step.Commands.([]string)
for _, v := range ts {
ls = append(ls, v)
}
err = c.gencmds(runjb, ls)
default:
err = errors.New("commands format err")
}
if err != nil {
return err
}
if len(runjb.Commands) <= 0 {
return errors.New("command format empty")
}
job.runjb = runjb
for i, v := range runjb.Commands {
job.cmdmp[v.Id] = &cmdSync{
cmd: v,
status: common.BuildStatusPending,
}
cmd := &model.TCmdLine{
Id: v.Id,
//GroupId: v.Gid,
BuildId: job.step.BuildId,
StepId: job.step.Id,
Status: common.BuildStatusPending,
Num: i + 1,
Content: v.Conts,
Created: time.Now(),
}
vls := common.RegVar.FindAllStringSubmatch(v.Conts, -1)
for _, zs := range vls {
k := zs[1]
if k == "" {
continue
}
vas := ""
secret := false
va, ok := c.build.Vars[k]
if ok {
vas = va.Value
secret = va.Secret
}
v.Conts = strings.ReplaceAll(v.Conts, zs[0], vas)
if secret {
cmd.Content = strings.ReplaceAll(cmd.Content, zs[0], "***")
} else {
cmd.Content = strings.ReplaceAll(cmd.Content, zs[0], vas)
}
}
_, err = comm.Db.InsertOne(cmd)
if err != nil {
comm.Db.Where("build_id=? and step_id=?", cmd.BuildId, cmd.StepId).Delete(cmd)
return err
}
}
return nil
}
func (c *BuildTask) appendcmds(runjb *runners.RunJob, conts string) {
m := &runners.CmdContent{
Id: utils.NewXid(),
Conts: conts,
}
logrus.Debugf("append cmd(%d)-%s", len(runjb.Commands), m.Conts)
//job.Commands[m.Id] = m
runjb.Commands = append(runjb.Commands, m)
}
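// gencmds accepts the step "commands" field in several YAML shapes, e.g. a
// single string, a list of strings (or nested lists), or a map whose values
// are strings or lists; every entry ends up as one command appended via
// appendcmds.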
func (c *BuildTask) gencmds(runjb *runners.RunJob, cmds []interface{}) (rterr error) {
defer func() {
if err := recover(); err != nil {
logrus.Warnf("BuildTask gencmds recover:%v", err)
logrus.Warnf("BuildTask stack:%s", string(debug.Stack()))
rterr = fmt.Errorf("%v", err)
}
}()
for _, v := range cmds {
switch v.(type) {
case string:
//gid := utils.NewXid()
//grp:=&hbtpBean.CmdGroupJson{Id: utils.NewXid()}
c.appendcmds(runjb, v.(string))
case []interface{}:
//gid := utils.NewXid()
for _, v1 := range v.([]interface{}) {
c.appendcmds(runjb, fmt.Sprintf("%v", v1))
}
case map[interface{}]interface{}:
for _, v1 := range v.(map[interface{}]interface{}) {
//gid := utils.NewXid()
switch v1.(type) {
case string:
c.appendcmds(runjb, fmt.Sprintf("%v", v1))
case []interface{}:
for _, v2 := range v1.([]interface{}) {
c.appendcmds(runjb, fmt.Sprintf("%v", v2))
}
}
}
case map[string]interface{}:
for _, v1 := range v.(map[string]interface{}) {
//gid := utils.NewXid()
switch v1.(type) {
case string:
c.appendcmds(runjb, fmt.Sprintf("%v", v1))
case []interface{}:
for _, v2 := range v1.([]interface{}) {
c.appendcmds(runjb, fmt.Sprintf("%v", v2))
}
}
}
}
}
return nil
}

28
engine/buildtask_test.go Normal file
View File

@ -0,0 +1,28 @@
package engine
import (
"fmt"
"github.com/gokins-main/core/runtime"
"testing"
)
func TestGitClone(t *testing.T) {
task := BuildTask{
build: &runtime.Build{
Id: "1231",
Repo: &runtime.Repository{
Name: "",
Token: "",
Sha: "c202ee042db1fc8b8c16c6c968195cec6185d7db",
CloneURL: "https://gitee.com/SuperHeroJim/gokins-test",
},
},
}
err := task.getRepo()
if err != nil {
fmt.Println(err)
return
}
fmt.Println(task.repoPath)
fmt.Println(task.isClone)
}

234
engine/hbtpruner.go Normal file
View File

@ -0,0 +1,234 @@
package engine
import (
"fmt"
"github.com/gokins-main/core/common"
"github.com/gokins-main/core/utils"
"github.com/gokins-main/gokins/comm"
"github.com/gokins-main/runner/runners"
hbtp "github.com/mgr9525/HyperByte-Transfer-Protocol"
"strconv"
"time"
)
type HbtpRunner struct {
}
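// AuthFun authenticates every HBTP request from a runner: the "sign"
// argument must equal md5(command + random + times + server secret), so the
// shared secret itself never travels over the wire.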
func (HbtpRunner) AuthFun() hbtp.AuthFun {
return func(c *hbtp.Context) bool {
cmds := c.Command()
times := c.Args().Get("times")
random := c.Args().Get("random")
sign := c.Args().Get("sign")
if cmds == "" || len(times) <= 5 || len(random) < 20 || sign == "" {
c.ResString(hbtp.ResStatusAuth, "auth param err")
return false
}
signs := utils.Md5String(cmds + random + times + comm.Cfg.Server.Secret)
if sign != signs {
println("token err:" + sign)
c.ResString(hbtp.ResStatusAuth, "token err:"+sign)
return false
}
tm, err := strconv.ParseInt(times, 10, 64)
if err != nil {
c.ResString(hbtp.ResStatusAuth, "times err:"+err.Error())
return false
}
tms := time.Unix(tm, 0)
/*if err != nil {
c.ResString(hbtp.ResStatusAuth, "times err:"+err.Error())
return false
}*/
hbtp.Debugf("HbtpRunnerAuth parse.times:%s", tms.Format(common.TimeFmt))
return true
}
}
func (HbtpRunner) ServerInfo(c *hbtp.Context) {
rts, err := Mgr.brun.ServerInfo()
if err != nil {
c.ResString(hbtp.ResStatusErr, err.Error())
return
}
c.ResJson(hbtp.ResStatusOk, rts)
}
func (HbtpRunner) PullJob(c *hbtp.Context, m *runners.ReqPullJob) {
rts, err := Mgr.brun.PullJob(m.Name, m.Plugs)
if err != nil {
c.ResString(hbtp.ResStatusErr, err.Error())
return
}
c.ResJson(hbtp.ResStatusOk, rts)
}
func (HbtpRunner) CheckCancel(c *hbtp.Context) {
buildId := c.ReqHeader().GetString("buildId")
c.ResString(hbtp.ResStatusOk, fmt.Sprintf("%t", Mgr.brun.CheckCancel(buildId)))
}
func (HbtpRunner) Update(c *hbtp.Context, m *runners.UpdateJobInfo) {
err := Mgr.brun.Update(m)
if err != nil {
c.ResString(hbtp.ResStatusErr, err.Error())
return
}
c.ResString(hbtp.ResStatusOk, "ok")
}
func (HbtpRunner) UpdateCmd(c *hbtp.Context) {
buildId := c.ReqHeader().GetString("buildId")
jobId := c.ReqHeader().GetString("jobId")
cmdId := c.ReqHeader().GetString("cmdId")
fs, err := c.ReqHeader().GetInt("fs")
code, _ := c.ReqHeader().GetInt("code")
if err != nil {
c.ResString(hbtp.ResStatusErr, err.Error())
return
}
err = Mgr.brun.UpdateCmd(buildId, jobId, cmdId, int(fs), int(code))
if err != nil {
c.ResString(hbtp.ResStatusErr, err.Error())
return
}
c.ResString(hbtp.ResStatusOk, "ok")
}
func (HbtpRunner) PushOutLine(c *hbtp.Context) {
buildId := c.ReqHeader().GetString("buildId")
jobId := c.ReqHeader().GetString("jobId")
cmdId := c.ReqHeader().GetString("cmdId")
bs := c.ReqHeader().GetString("bs")
iserr := c.ReqHeader().GetBool("iserr")
err := Mgr.brun.PushOutLine(buildId, jobId, cmdId, bs, iserr)
if err != nil {
c.ResString(hbtp.ResStatusErr, err.Error())
return
}
c.ResString(hbtp.ResStatusOk, "ok")
}
func (HbtpRunner) FindJobId(c *hbtp.Context) {
buildId := c.ReqHeader().GetString("buildId")
stgNm := c.ReqHeader().GetString("stgNm")
stpNm := c.ReqHeader().GetString("stpNm")
rts, ok := Mgr.brun.FindJobId(buildId, stgNm, stpNm)
if !ok {
c.ResString(hbtp.ResStatusNotFound, "")
return
}
c.ResString(hbtp.ResStatusOk, rts)
}
func (HbtpRunner) ReadDir(c *hbtp.Context) {
buildId := c.ReqHeader().GetString("buildId")
pth := c.ReqHeader().GetString("pth")
fs, err := c.ReqHeader().GetInt("fs")
if err != nil {
c.ResString(hbtp.ResStatusErr, err.Error())
return
}
rts, err := Mgr.brun.ReadDir(int(fs), buildId, pth)
if err != nil {
c.ResString(hbtp.ResStatusErr, err.Error())
return
}
c.ResJson(hbtp.ResStatusOk, rts)
}
func (HbtpRunner) ReadFile(c *hbtp.Context) {
buildId := c.ReqHeader().GetString("buildId")
pth := c.ReqHeader().GetString("pth")
fs, err := c.ReqHeader().GetInt("fs")
if err != nil {
c.ResString(hbtp.ResStatusErr, err.Error())
return
}
flsz, flr, err := Mgr.brun.ReadFile(int(fs), buildId, pth)
if err != nil {
c.ResString(hbtp.ResStatusErr, err.Error())
return
}
defer flr.Close()
c.ResString(hbtp.ResStatusOk, fmt.Sprintf("%d", flsz))
bts := make([]byte, 10240)
for !hbtp.EndContext(comm.Ctx) {
n, err := flr.Read(bts)
if n > 0 {
_, err = c.Conn().Write(bts[:n])
if err != nil {
break
}
}
if err != nil {
break
}
}
}
func (HbtpRunner) GetEnv(c *hbtp.Context) {
buildId := c.ReqHeader().GetString("buildId")
jobId := c.ReqHeader().GetString("jobId")
key := c.ReqHeader().GetString("key")
rts, ok := Mgr.brun.GetEnv(buildId, jobId, key)
if !ok {
c.ResString(hbtp.ResStatusNotFound, "")
return
}
c.ResString(hbtp.ResStatusOk, rts)
}
func (HbtpRunner) GenEnv(c *hbtp.Context, env utils.EnvVal) {
buildId := c.ReqHeader().GetString("buildId")
jobId := c.ReqHeader().GetString("jobId")
err := Mgr.brun.GenEnv(buildId, jobId, env)
if err != nil {
c.ResString(hbtp.ResStatusErr, err.Error())
return
}
c.ResString(hbtp.ResStatusOk, "ok")
}
func (HbtpRunner) UploadFile(c *hbtp.Context) {
buildId := c.ReqHeader().GetString("buildId")
jobId := c.ReqHeader().GetString("jobId")
dir := c.ReqHeader().GetString("dir")
pth := c.ReqHeader().GetString("pth")
fs, err := c.ReqHeader().GetInt("fs")
if err != nil {
c.ResString(hbtp.ResStatusErr, err.Error())
return
}
flw, err := Mgr.brun.UploadFile(int(fs), buildId, jobId, dir, pth)
if err != nil {
c.ResString(hbtp.ResStatusErr, err.Error())
return
}
defer flw.Close()
c.ResString(hbtp.ResStatusOk, "ok")
bts := make([]byte, 10240)
for !hbtp.EndContext(comm.Ctx) {
n, err := c.Conn().Read(bts)
if n > 0 {
_, err = flw.Write(bts[:n])
if err != nil {
break
}
}
if err != nil {
break
}
}
}
func (HbtpRunner) FindArtVersionId(c *hbtp.Context) {
buildId := c.ReqHeader().GetString("buildId")
idnt := c.ReqHeader().GetString("idnt")
name := c.ReqHeader().GetString("name")
rts, err := Mgr.brun.FindArtVersionId(buildId, idnt, name)
if err != nil {
c.ResString(hbtp.ResStatusErr, err.Error())
return
}
c.ResString(hbtp.ResStatusOk, rts)
}
func (HbtpRunner) NewArtVersionId(c *hbtp.Context) {
buildId := c.ReqHeader().GetString("buildId")
idnt := c.ReqHeader().GetString("idnt")
name := c.ReqHeader().GetString("name")
rts, err := Mgr.brun.NewArtVersionId(buildId, idnt, name)
if err != nil {
c.ResString(hbtp.ResStatusErr, err.Error())
return
}
c.ResString(hbtp.ResStatusOk, rts)
}

182
engine/jobEgn.go Normal file
View File

@ -0,0 +1,182 @@
package engine
import (
"container/list"
"errors"
"fmt"
"github.com/gokins-main/core/runtime"
"github.com/gokins-main/core/utils"
"github.com/gokins-main/gokins/comm"
"github.com/gokins-main/runner/runners"
hbtp "github.com/mgr9525/HyperByte-Transfer-Protocol"
"github.com/sirupsen/logrus"
"runtime/debug"
"sync"
"time"
)
type JobEngine struct {
tmr *utils.Timer
exelk sync.RWMutex
execs map[string]*executer
joblk sync.RWMutex
jobs map[string]*jobSync
}
type executer struct {
sync.RWMutex
plug string
tms time.Time
jobwt *list.List
}
type cmdSync struct {
sync.RWMutex
cmd *runners.CmdContent
code int
status string
started time.Time
finished time.Time
}
type jobSync struct {
sync.RWMutex
task *BuildTask
step *runtime.Step
runjb *runners.RunJob
cmdmp map[string]*cmdSync
ended bool
}
func (c *jobSync) status(stat, errs string, event ...string) {
c.Lock()
defer c.Unlock()
c.step.Status = stat
c.step.Error = errs
if len(event) > 0 {
c.step.Event = event[0]
}
}
func StartJobEngine() *JobEngine {
c := &JobEngine{
tmr: utils.NewTimer(time.Second * 30),
execs: make(map[string]*executer),
jobs: make(map[string]*jobSync),
}
go func() {
for !hbtp.EndContext(comm.Ctx) {
c.run()
time.Sleep(time.Second)
}
}()
return c
}
func (c *JobEngine) run() {
defer func() {
if err := recover(); err != nil {
logrus.Warnf("JobEngine run recover:%v", err)
logrus.Warnf("JobEngine stack:%s", string(debug.Stack()))
}
}()
if !c.tmr.Tick() {
return
}
func() {
c.exelk.RLock()
defer c.exelk.RUnlock()
for k, v := range c.execs {
v.RLock()
if time.Since(v.tms).Minutes() > 5 {
go c.rmExec(k, v)
}
v.RUnlock()
}
}()
func() {
c.joblk.Lock()
defer c.joblk.Unlock()
for k, v := range c.jobs {
if v.ended {
delete(c.jobs, k)
}
}
}()
}
func (c *JobEngine) rmExec(k string, ex *executer) {
defer func() {
if err := recover(); err != nil {
logrus.Warnf("JobEngine stopsJob recover:%v", err)
logrus.Warnf("JobEngine stack:%s", string(debug.Stack()))
}
}()
c.exelk.Lock()
defer c.exelk.Unlock()
ex.Lock()
defer ex.Unlock()
for e := ex.jobwt.Front(); e != nil; e = e.Next() {
job := e.Value.(*jobSync)
job.ended = true
}
delete(c.execs, k)
}
func (c *JobEngine) Put(job *jobSync) error {
if job == nil || job.step.Step == "" {
return errors.New("step plugin empty")
}
c.exelk.RLock()
e, ok := c.execs[job.step.Step]
c.exelk.RUnlock()
if !ok {
return fmt.Errorf("Not Found Plugin:%s", job.step.Step)
}
e.Lock()
defer e.Unlock()
e.jobwt.PushBack(job)
return nil
}
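// Pull is polled by runners looking for work: for each supported plugin it
// lazily registers an executer queue, refreshes its last-seen time and hands
// out the oldest waiting job, indexing it by step id so later status updates
// can find it.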
func (c *JobEngine) Pull(name string, plugs []string) *runners.RunJob {
for _, v := range plugs {
if v == "" {
continue
}
c.exelk.RLock()
ex, ok := c.execs[v]
c.exelk.RUnlock()
if !ok {
ex = &executer{
plug: v,
tms: time.Now(),
jobwt: list.New(),
}
c.exelk.Lock()
c.execs[v] = ex
c.exelk.Unlock()
}
var job *jobSync
ex.Lock()
ex.tms = time.Now()
e := ex.jobwt.Front()
if e != nil {
job = e.Value.(*jobSync)
ex.jobwt.Remove(e)
c.joblk.Lock()
c.jobs[job.step.Id] = job
c.joblk.Unlock()
}
ex.Unlock()
if job != nil {
return job.runjb
}
}
return nil
}
/*func (c *JobEngine) GetJob(id string) (*jobSync, bool) {
if id == "" {
return nil, false
}
c.joblk.RLock()
defer c.joblk.RUnlock()
job, ok := c.jobs[id]
return job, ok
}*/

82
engine/mgr.go Normal file
View File

@ -0,0 +1,82 @@
package engine
import (
"os"
"path/filepath"
"runtime/debug"
"time"
"github.com/gokins-main/core/common"
"github.com/gokins-main/gokins/comm"
"github.com/gokins-main/runner/runners"
hbtp "github.com/mgr9525/HyperByte-Transfer-Protocol"
"github.com/sirupsen/logrus"
)
var Mgr = &Manager{}
type Manager struct {
buildEgn *BuildEngine
jobEgn *JobEngine
shellRun *runners.Engine
brun *baseRunner
hrun *HbtpRunner
timerEgn *TimerEngine
}
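// Start boots the build, job and timer engines, wires up the runner facades,
// and (since the built-in "shell@ssh" plugin is always appended) runs an
// in-process runner for the configured shells. A background goroutine waits
// for the global context to end and then stops the engines.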
func Start() error {
Mgr.buildEgn = StartBuildEngine()
Mgr.jobEgn = StartJobEngine()
Mgr.timerEgn = StartTimerEngine()
Mgr.brun = &baseRunner{}
Mgr.hrun = &HbtpRunner{}
//runners
comm.Cfg.Server.Shells = append(comm.Cfg.Server.Shells, "shell@ssh")
if len(comm.Cfg.Server.Shells) > 0 {
Mgr.shellRun = runners.NewEngine(runners.Config{
Name: "mainRunner",
Workspace: filepath.Join(comm.WorkPath, common.PathRunner),
Plugin: comm.Cfg.Server.Shells,
}, Mgr.brun)
go func() {
err := Mgr.shellRun.Run(comm.Ctx)
if err != nil {
logrus.Errorf("runner err:%v", err)
}
}()
}
go func() {
os.RemoveAll(filepath.Join(comm.WorkPath, common.PathTmp))
for !hbtp.EndContext(comm.Ctx) {
//Mgr.run()
time.Sleep(time.Millisecond * 100)
}
Mgr.buildEgn.Stop()
if Mgr.shellRun != nil {
Mgr.shellRun.Stop()
}
}()
return nil
}
func (c *Manager) run() {
defer func() {
if err := recover(); err != nil {
logrus.Warnf("Manager run recover:%v", err)
logrus.Warnf("Manager stack:%s", string(debug.Stack()))
}
}()
}
func (c *Manager) BuildEgn() *BuildEngine {
return c.buildEgn
}
func (c *Manager) HRun() *HbtpRunner {
return c.hrun
}
func (c *Manager) TimerEng() *TimerEngine {
return c.timerEgn
}

380
engine/runner.go Normal file
View File

@ -0,0 +1,380 @@
package engine
import (
"encoding/json"
"errors"
"fmt"
"github.com/gokins-main/gokins/service"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
"time"
"github.com/gokins-main/core/common"
"github.com/gokins-main/core/utils"
"github.com/gokins-main/gokins/bean"
"github.com/gokins-main/gokins/comm"
"github.com/gokins-main/gokins/model"
"github.com/gokins-main/runner/runners"
hbtp "github.com/mgr9525/HyperByte-Transfer-Protocol"
)
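// baseRunner implements the runner-facing API. The in-process shell runner
// calls it directly, while remote runners reach the same methods through
// HbtpRunner, which simply forwards each request.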
type baseRunner struct{}
func (c *baseRunner) ServerInfo() (*runners.ServerInfo, error) {
return &runners.ServerInfo{
WebHost: comm.Cfg.Server.Host,
DownToken: comm.Cfg.Server.DownToken,
}, nil
}
func (c *baseRunner) PullJob(name string, plugs []string) (*runners.RunJob, error) {
tms := time.Now()
for time.Since(tms).Seconds() < 5 {
v := Mgr.jobEgn.Pull(name, plugs)
if v != nil {
return v, nil
}
}
return nil, errors.New("not found")
}
func (c *baseRunner) CheckCancel(buildId string) bool {
v, ok := Mgr.buildEgn.Get(buildId)
if !ok {
return true
}
return v.stopd()
}
func (c *baseRunner) Update(m *runners.UpdateJobInfo) error {
tsk, ok := Mgr.buildEgn.Get(m.BuildId)
if !ok {
return errors.New("not found build")
}
job, ok := tsk.GetJob(m.JobId)
if !ok {
return errors.New("not found job")
}
tsk.UpJob(job, m.Status, m.Error, m.ExitCode)
return nil
}
func (c *baseRunner) UpdateCmd(buildId, jobId, cmdId string, fs, code int) error {
tsk, ok := Mgr.buildEgn.Get(buildId)
if !ok {
return errors.New("not found build")
}
job, ok := tsk.GetJob(jobId)
if !ok {
return errors.New("not found job")
}
job.RLock()
cmd, ok := job.cmdmp[cmdId]
job.RUnlock()
if !ok {
return errors.New("not found cmd")
}
tsk.UpJobCmd(cmd, fs, code)
return nil
}
func (c *baseRunner) PushOutLine(buildId, jobId, cmdId, bs string, iserr bool) error {
tsk, ok := Mgr.buildEgn.Get(buildId)
if !ok {
return errors.New("not found build")
}
job, ok := tsk.GetJob(jobId)
if !ok {
return errors.New("not found job")
}
bts, err := json.Marshal(&bean.LogOutJson{
Id: cmdId,
Content: bs,
Times: time.Now(),
Errs: iserr,
})
if err != nil {
return err
}
dir := filepath.Join(comm.WorkPath, common.PathBuild, job.step.BuildId, common.PathJobs, job.step.Id)
logpth := filepath.Join(dir, "build.log")
os.MkdirAll(dir, 0755)
logfl, err := os.OpenFile(logpth, os.O_CREATE|os.O_APPEND|os.O_RDWR, 0644)
if err != nil {
return err
}
defer logfl.Close()
logfl.Write(bts)
logfl.WriteString("\n")
return nil
}
func (c *baseRunner) FindJobId(buildId, stgNm, stpNm string) (string, bool) {
if buildId == "" || stgNm == "" || stpNm == "" {
return "", false
}
build, ok := Mgr.buildEgn.Get(buildId)
if !ok {
return "", false
}
build.staglk.RLock()
defer build.staglk.RUnlock()
stg, ok := build.stages[stgNm]
if !ok {
return "", false
}
stg.RLock()
defer stg.RUnlock()
for _, v := range stg.jobs {
if v.step.Name == stpNm {
return v.step.Id, true
}
}
return "", false
}
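// ReadDir (and ReadFile below) resolve a path for a runner. fs selects the
// root: 1 = the cloned repository, 2 = the shared artifacts directory,
// 3 = the build's jobs workspace.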
func (c *baseRunner) ReadDir(fs int, buildId string, pth string) ([]*runners.DirEntry, error) {
if buildId == "" || pth == "" {
return nil, errors.New("param err")
}
build, ok := Mgr.buildEgn.Get(buildId)
if !ok {
return nil, errors.New("not found build")
}
pths := ""
if fs == 1 {
pths = filepath.Join(build.repoPaths, pth)
} else if fs == 2 {
pths = filepath.Join(comm.WorkPath, common.PathArtifacts, pth)
} else if fs == 3 {
pths = filepath.Join(build.buildPath, common.PathJobs, pth)
}
fls, err := os.ReadDir(pths)
if err != nil {
return nil, err
}
var ls []*runners.DirEntry
for _, v := range fls {
e := &runners.DirEntry{
Name: v.Name(),
IsDir: v.IsDir(),
}
ifo, err := v.Info()
if err == nil {
e.Size = ifo.Size()
}
ls = append(ls, e)
}
return ls, nil
}
func (c *baseRunner) ReadFile(fs int, buildId string, pth string) (int64, io.ReadCloser, error) {
if buildId == "" || pth == "" {
return 0, nil, errors.New("param err")
}
build, ok := Mgr.buildEgn.Get(buildId)
if !ok {
return 0, nil, errors.New("not found build")
}
pths := ""
if fs == 1 {
pths = filepath.Join(build.repoPaths, pth)
} else if fs == 2 {
pths = filepath.Join(comm.WorkPath, common.PathArtifacts, pth)
} else if fs == 3 {
pths = filepath.Join(build.buildPath, common.PathJobs, pth)
}
if pths == "" {
return 0, nil, errors.New("path param err")
}
stat, err := os.Stat(pths)
if err != nil {
return 0, nil, err
}
fl, err := os.Open(pths)
if err != nil {
return 0, nil, err
}
return stat.Size(), fl, nil
}
func (c *baseRunner) GetEnv(buildId, jobId, key string) (string, bool) {
if jobId == "" || key == "" {
return "", false
}
tsk, ok := Mgr.buildEgn.Get(buildId)
if !ok {
return "", false
}
job, ok := tsk.GetJob(jobId)
if !ok {
return "", false
}
dir := filepath.Join(comm.WorkPath, common.PathBuild, job.step.BuildId, common.PathJobs, job.step.Id)
bts, err := ioutil.ReadFile(filepath.Join(dir, "build.env"))
if err != nil {
return "", false
}
mp := hbtp.NewMaps(bts)
v, ok := mp.Get(key)
if !ok {
return "", false
}
switch v.(type) {
case string:
return v.(string), true
}
return fmt.Sprintf("%v", v), true
}
func (c *baseRunner) GenEnv(buildId, jobId string, env utils.EnvVal) error {
if jobId == "" || env == nil {
return errors.New("param err")
}
tsk, ok := Mgr.buildEgn.Get(buildId)
if !ok {
return errors.New("not found build")
}
job, ok := tsk.GetJob(jobId)
if !ok {
return errors.New("not found job")
}
bts, err := json.Marshal(env)
if err != nil {
return err
}
dir := filepath.Join(comm.WorkPath, common.PathBuild, job.step.BuildId, common.PathJobs, job.step.Id)
err = ioutil.WriteFile(filepath.Join(dir, "build.env"), bts, 0640)
return err
}
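// UploadFile opens a writable target for a runner-produced artifact.
// fs 1 writes under the shared artifacts directory, fs 2 under the job's own
// artifact folder inside the build workspace; parent directories are created
// as needed.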
func (c *baseRunner) UploadFile(fs int, buildId, jobId string, dir, pth string) (io.WriteCloser, error) {
if jobId == "" || pth == "" {
return nil, errors.New("param err")
}
tsk, ok := Mgr.buildEgn.Get(buildId)
if !ok {
return nil, errors.New("not found build")
}
job, ok := tsk.GetJob(jobId)
if !ok {
return nil, errors.New("not found job")
}
pths := ""
if fs == 1 {
pths = filepath.Join(comm.WorkPath, common.PathArtifacts, dir, pth)
} else if fs == 2 {
pths = filepath.Join(job.task.buildPath, common.PathJobs, job.step.Id, common.PathArts, dir, pth)
}
if pths == "" {
return nil, errors.New("path param err")
}
dirs := filepath.Dir(pths)
os.MkdirAll(dirs, 0750)
fl, err := os.OpenFile(pths, os.O_CREATE|os.O_RDWR, 0640)
/*if err!=nil{
return nil,err
}*/
return fl, err
}
func (c *baseRunner) FindArtVersionId(buildId, idnt string, names string) (string, error) {
tnms := strings.Split(strings.TrimSpace(names), "@")
name := tnms[0]
vers := ""
if len(tnms) > 1 {
vers = tnms[1]
}
if buildId == "" || idnt == "" || name == "" {
return "", errors.New("param err")
}
build, ok := Mgr.buildEgn.Get(buildId)
if !ok {
return "", errors.New("not found build")
}
arty := &model.TArtifactory{}
ok, _ = comm.Db.Where("deleted!=1 and identifier=? and org_id in (select org_id from t_org_pipe where pipe_id=?)",
idnt, build.build.PipelineId).Get(arty)
if !ok {
return "", errors.New("not found artifactory")
}
pv := &model.TPipelineVersion{}
ok = service.GetIdOrAid(build.build.PipelineVersionId, pv)
if !ok {
return "", errors.New("not found pv")
}
usr := &model.TUser{}
ok = service.GetIdOrAid(pv.Uid, usr)
if !ok {
return "", errors.New("not found user")
}
perm := service.NewOrgPerm(usr, arty.OrgId)
if !perm.CanExec() {
return "", fmt.Errorf("user put '%s' no permission", idnt)
}
artp := &model.TArtifactPackage{}
ok, _ = comm.Db.Where("deleted!=1 and repo_id=? and name=?", arty.Id, name).Get(artp)
if !ok {
return "", fmt.Errorf("not found artifact '%s'", names)
}
artv := &model.TArtifactVersion{}
ses := comm.Db.Where("package_id=?", artp.Id)
if vers != "" {
ses.And("version=? or sha=?", vers)
}
ok, _ = ses.OrderBy("aid DESC").Get(artv)
if !ok {
return "", fmt.Errorf("not found artifacts '%s'", names)
}
return artv.Id, nil
}
func (c *baseRunner) NewArtVersionId(buildId, idnt string, name string) (string, error) {
name = strings.Split(strings.TrimSpace(name), "@")[0]
if buildId == "" || idnt == "" || name == "" {
return "", errors.New("param err")
}
build, ok := Mgr.buildEgn.Get(buildId)
if !ok {
return "", errors.New("not found build")
}
arty := &model.TArtifactory{}
ok, _ = comm.Db.Where("deleted!=1 and identifier=? and org_id in (select org_id from t_org_pipe where pipe_id=?)",
idnt, build.build.PipelineId).Get(arty)
if !ok {
return "", errors.New("not found artifactory")
}
if arty.Disabled == 1 {
return "", errors.New("artifactory already disabled")
}
artp := &model.TArtifactPackage{}
ok, _ = comm.Db.Where("deleted!=1 and repo_id=? and name=?", arty.Id, name).Get(artp)
if !ok {
artp.Id = utils.NewXid()
artp.RepoId = arty.Id
artp.Name = name
artp.Created = time.Now()
artp.Updated = time.Now()
_, err := comm.Db.InsertOne(artp)
if err != nil {
return "", err
}
}
artv := &model.TArtifactVersion{
Id: utils.NewXid(),
RepoId: arty.Id,
PackageId: artp.Id,
Name: artp.Name,
Preview: 1,
Created: time.Now(),
Updated: time.Now(),
}
artv.Sha = artv.Id
_, err := comm.Db.InsertOne(artv)
if err != nil {
return "", err
}
return artv.Id, nil
}

206
engine/timermgr.go Normal file
View File

@ -0,0 +1,206 @@
package engine
import (
"encoding/json"
"errors"
"github.com/gokins-main/core/common"
"github.com/gokins-main/gokins/comm"
"github.com/gokins-main/gokins/model"
"github.com/gokins-main/gokins/service"
hbtp "github.com/mgr9525/HyperByte-Transfer-Protocol"
"github.com/sirupsen/logrus"
"runtime/debug"
"sync"
"time"
)
type TimerEngine struct {
tasklk sync.RWMutex
tasks map[string]*timerExec
}
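// timerExec is one scheduled trigger. typ is the repeat interval parsed from
// the trigger params: 0 = run once at tms, 1 = every minute, 2 = hourly,
// 3 = daily, 4 = weekly, 5 = every 30 days. tick is the next fire time.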
type timerExec struct {
tt *model.TTrigger
typ int64
tms time.Time
tick time.Time
}
func StartTimerEngine() *TimerEngine {
c := &TimerEngine{
tasks: make(map[string]*timerExec),
}
go func() {
c.refresh()
for !hbtp.EndContext(comm.Ctx) {
c.run()
time.Sleep(time.Millisecond * 10)
}
}()
return c
}
func (c *TimerEngine) run() {
defer func() {
if err := recover(); err != nil {
logrus.Warnf("TimerEngine run recover:%v", err)
logrus.Warnf("TimerEngine stack:%s", string(debug.Stack()))
}
}()
c.tasklk.RLock()
defer c.tasklk.RUnlock()
for _, v := range c.tasks {
c.execItem(v)
}
}
func (c *TimerEngine) execItem(v *timerExec) {
defer func() {
if err := recover(); err != nil {
logrus.Warnf("TimerEngine execItem recover:%v", err)
logrus.Warnf("TimerEngine stack:%s", string(debug.Stack()))
}
}()
if time.Since(v.tick) > 0 {
now := time.Now()
logrus.Debugf("Timer(%s[%d]:%s) tick on:%s", v.tt.Name, v.typ, now.Format(common.TimeFmt), v.tick.Format(common.TimeFmt))
switch v.typ {
case 0:
go c.Delete(v.tt.Id)
time.Sleep(time.Millisecond * 10)
case 1:
v.tick = now.Add(time.Minute)
case 2:
v.tick = now.Add(time.Hour)
case 3:
v.tick = now.Add(time.Hour * 24)
case 4:
v.tick = now.Add(time.Hour * 24 * 7)
case 5:
v.tick = now.Add(time.Hour * 24 * 30)
}
rb, err := service.TriggerTimer(v.tt)
if err != nil {
logrus.Errorf("TriggerTimer err:%v", err)
} else {
Mgr.BuildEgn().Put(rb)
}
}
}
func (c *TimerEngine) refresh() {
defer func() {
if err := recover(); err != nil {
logrus.Warnf("TimerEngine refresh recover:%v", err)
logrus.Warnf("TimerEngine stack:%s", string(debug.Stack()))
}
}()
var ls []*model.TTrigger
comm.Db.Where("enabled = 1 AND types = 'timer'").Find(&ls)
c.tasklk.Lock()
defer c.tasklk.Unlock()
for _, v := range ls {
err := c.resetOne(v)
if err != nil {
logrus.Errorf("TimerEngine resetOne err:%v", err)
}
}
}
func (c *TimerEngine) resetOne(tmr *model.TTrigger) error {
defer func() {
if err := recover(); err != nil {
logrus.Warnf("TimerEngine refresh recover:%v", err)
logrus.Warnf("TimerEngine stack:%s", string(debug.Stack()))
}
}()
if tmr.Types != "timer" {
return errors.New("type is err:" + tmr.Types)
}
mp := hbtp.Map{}
err := json.Unmarshal([]byte(tmr.Params), &mp)
if err != nil {
return err
}
typ, err := mp.GetInt("timerType")
if err != nil {
return err
}
dates := mp.GetString("dates")
tms, err := time.ParseInLocation(time.RFC3339Nano, dates, time.Local)
if err != nil {
return err
}
switch typ {
case 0:
if time.Since(tms) < 0 {
t, ok := c.tasks[tmr.Id]
if !ok {
t = &timerExec{
tt: tmr,
}
c.tasks[tmr.Id] = t
}
t.typ = typ
t.tms = tms
t.tick = tms
logrus.Debugf("Timer add(%s[%d]:%s) tick on:%s", tmr.Name, typ, tms.Format(common.TimeFmt), t.tick.Format(common.TimeFmt))
}
case 1, 2, 3, 4, 5:
now := time.Now()
t, ok := c.tasks[tmr.Id]
if !ok {
t = &timerExec{
tt: tmr,
}
c.tasks[tmr.Id] = t
}
t.typ = typ
t.tms = tms
switch typ {
case 1:
t.tick = time.Date(now.Year(), now.Month(), now.Day(), now.Hour(), now.Minute(), tms.Second(), 0, time.Local)
if time.Since(t.tick) > 0 {
t.tick = t.tick.Add(time.Minute)
}
case 2:
t.tick = time.Date(now.Year(), now.Month(), now.Day(), now.Hour(), tms.Minute(), tms.Second(), 0, time.Local)
if time.Since(t.tick) > 0 {
t.tick = t.tick.Add(time.Hour)
}
case 3:
t.tick = time.Date(now.Year(), now.Month(), now.Day(), tms.Hour(), tms.Minute(), tms.Second(), 0, time.Local)
if time.Since(t.tick) > 0 {
t.tick = t.tick.Add(time.Hour * 24)
}
case 4:
t.tick = time.Date(now.Year(), now.Month(), tms.Day(), tms.Hour(), tms.Minute(), tms.Second(), 0, time.Local)
if time.Since(t.tick) > 0 {
t.tick = t.tick.Add(time.Hour * 24 * 7)
}
case 5:
t.tick = time.Date(now.Year(), now.Month(), tms.Day(), tms.Hour(), tms.Minute(), tms.Second(), 0, time.Local)
if time.Since(t.tick) > 0 {
t.tick = t.tick.Add(time.Hour * 24 * 30)
}
}
logrus.Debugf("Timer add(%s[%d]:%s) tick on:%s", tmr.Name, typ, tms.Format(common.TimeFmt), t.tick.Format(common.TimeFmt))
}
return nil
}
func (c *TimerEngine) Refresh(tmrid string) error {
if tmrid == "" {
return errors.New("param err")
}
tmr := &model.TTrigger{}
ok, _ := comm.Db.Where("id=?", tmrid).Get(tmr)
if !ok || tmr.Enabled != 1 {
c.Delete(tmrid)
return errors.New("not found")
}
return c.resetOne(tmr)
}
func (c *TimerEngine) Delete(tmrid string) {
c.tasklk.Lock()
delete(c.tasks, tmrid)
c.tasklk.Unlock()
}

37
engine/timermgr_test.go Normal file
View File

@ -0,0 +1,37 @@
package engine
import (
"fmt"
"sync"
"testing"
"time"
)
func TestTimer(t *testing.T) {
tm := time.NewTimer(time.Second * 5)
select {
case v := <-tm.C:
fmt.Println(v)
default:
fmt.Println("default")
}
fmt.Println(tm)
}
func TestSync(t *testing.T) {
lk := sync.RWMutex{}
go func() {
for {
lk.Lock()
fmt.Println(1)
lk.Unlock()
}
}()
go func() {
for {
lk.Lock()
fmt.Println(2)
lk.Unlock()
}
}()
}

24
go.mod
View File

@ -1,14 +1,20 @@
module gokins
module github.com/gokins-main/gokins
go 1.14
go 1.15
require (
github.com/boltdb/bolt v1.3.1
github.com/dgrijalva/jwt-go v3.2.0+incompatible
github.com/dlclark/regexp2 v1.4.0 // indirect
github.com/dop251/goja v0.0.0-20201020185350-bf18fe8c88fa
github.com/gin-gonic/gin v1.6.3
github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect
github.com/go-xorm/xorm v0.7.9
github.com/mattn/go-sqlite3 v1.14.0
github.com/mgr9525/go-ruisutil v1.0.8-0.20200702070004-0f4cf34379ef
github.com/gin-gonic/gin v1.7.2
github.com/go-git/go-git/v5 v5.4.2
github.com/go-sql-driver/mysql v1.5.0
github.com/gokins-main/core v0.0.0-20210718160656-01ed57f06c58
github.com/gokins-main/runner v0.0.0-20210722023302-e6ba8889bce8
github.com/golang-migrate/migrate/v4 v4.14.1
github.com/mgr9525/HyperByte-Transfer-Protocol v1.1.5
github.com/sirupsen/logrus v1.8.1
gopkg.in/alecthomas/kingpin.v2 v2.2.6
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c
xorm.io/builder v0.3.8
xorm.io/xorm v1.1.0
)

877
go.sum

File diff suppressed because it is too large

14
hook/action.go Normal file
View File

@ -0,0 +1,14 @@
package hook
type Action string
const (
ActionOpen = "open"
ActionOpened = "opened"
ActionClose = "close"
ActionCreate = "create"
ActionDelete = "delete"
ActionSync = "sync"
ActionUpdate = "update"
ActionSynchronize = "synchronize"
)

48
hook/event.go Normal file
View File

@ -0,0 +1,48 @@
package hook
// Trigger event types
const (
// EVENTS_TYPE_COMMENT comment event
EVENTS_TYPE_COMMENT = "comment"
// EVENTS_TYPE_PR pull request event
EVENTS_TYPE_PR = "pr"
// EVENTS_TYPE_PUSH push event
EVENTS_TYPE_PUSH = "push"
// EVENTS_TYPE_BUILD manually triggered build event
EVENTS_TYPE_BUILD = "build"
// EVENTS_TYPE_REBUILD manually triggered rebuild event
EVENTS_TYPE_REBUILD = "rebuild"
)
const (
GITEE_EVENT = "X-Gitee-Event"
GITEE_EVENT_PUSH = "Push Hook"
GITEE_EVENT_NOTE = "Note Hook"
GITEE_EVENT_PR = "Merge Request Hook"
GITEE_EVENT_PR_ACTION_OPEN = "open"
GITEE_EVENT_PR_ACTION_UPDATE = "update"
GITEE_EVENT_PR_ACTION_COMMENT = "comment"
)
const (
GITHUB_EVENT = "X-GitHub-Event"
GITHUB_EVENT_ISSUE_COMMENT = "issue_comment"
GITHUB_EVENT_PUSH = "push"
GITHUB_EVENT_PR = "pull_request"
GITHUB_EVENT_PR_ACTION_OPEN = "open"
GITHUB_EVENT_PR_ACTION_UPDATE = "update"
GITHUB_EVENT_PR_ACTION_COMMENT = "comment"
)
const (
GITLAB_EVENT = "X-Gitlab-Event"
GITLAB_EVENT_PUSH = "Push Hook"
GITLAB_EVENT_PR = "Merge Request Hook"
GITLAB_EVENT_NOTE = "Note Hook"
)
const (
GITEA_EVENT = "X-Gitea-Event"
GITEA_EVENT_PUSH = "push"
GITEA_EVENT_PR = "pull_request"
GITEA_EVENT_NOTE = "issue_comment"
)

19
hook/git.go Normal file
View File

@ -0,0 +1,19 @@
package hook
type (
Reference struct {
Name string
Path string
Sha string
}
Commit struct {
Sha string
Message string
Link string
}
User struct {
UserName string
}
)

1118
hook/gitea/gitea.go Normal file

File diff suppressed because it is too large

1415
hook/gitee/gitee.go Normal file

File diff suppressed because it is too large

1505
hook/github/github.go Normal file

File diff suppressed because it is too large

613
hook/gitlab/gitlab.go Normal file
View File

@ -0,0 +1,613 @@
package gitlab
import (
"crypto/hmac"
"crypto/sha1"
"crypto/sha256"
"encoding/hex"
"encoding/json"
"errors"
"fmt"
"github.com/gokins-main/gokins/hook"
"github.com/sirupsen/logrus"
"hash"
"io"
"io/ioutil"
"net/http"
"runtime/debug"
"strconv"
"strings"
"time"
)
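// Parse dispatches a GitLab webhook by its X-Gitlab-Event header (push, note
// and merge request events are supported) and then compares the
// X-Gitlab-Token header with the configured secret before returning the
// parsed hook.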
func Parse(req *http.Request, secret string) (hook.WebHook, error) {
defer func() {
if err := recover(); err != nil {
logrus.Warnf("WebhookService Parse err:%+v", err)
logrus.Warnf("%s", string(debug.Stack()))
}
}()
data, err := ioutil.ReadAll(
io.LimitReader(req.Body, 10000000),
)
if err != nil {
return nil, err
}
var wb hook.WebHook
switch req.Header.Get(hook.GITLAB_EVENT) {
case hook.GITLAB_EVENT_PUSH:
wb, err = parsePushHook(data)
case hook.GITLAB_EVENT_NOTE:
wb, err = parseCommentHook(data)
case hook.GITLAB_EVENT_PR:
wb, err = parsePullRequestHook(data)
default:
return nil, fmt.Errorf("hook has unknown header:%v", req.Header.Get(hook.GITLAB_EVENT))
}
if err != nil {
return nil, err
}
sig := req.Header.Get("X-Gitlab-Token")
if secret != sig {
return wb, errors.New("密钥不正确")
}
return wb, nil
}
func Validate(h func() hash.Hash, message, key []byte, signature string) bool {
decoded, err := hex.DecodeString(signature)
if err != nil {
return false
}
return validate(h, message, key, decoded)
}
func validatePrefix(message, key []byte, signature string) bool {
parts := strings.Split(signature, "=")
if len(parts) != 2 {
return false
}
switch parts[0] {
case "sha1":
return Validate(sha1.New, message, key, parts[1])
case "sha256":
return Validate(sha256.New, message, key, parts[1])
default:
return false
}
}
func validate(h func() hash.Hash, message, key, signature []byte) bool {
mac := hmac.New(h, key)
mac.Write(message)
sum := mac.Sum(nil)
return hmac.Equal(signature, sum)
}
func parseCommentHook(data []byte) (*hook.PullRequestCommentHook, error) {
gp := new(gitlabCommentHook)
err := json.Unmarshal(data, gp)
if err != nil {
return nil, err
}
return convertCommentHook(gp)
}
func parsePullRequestHook(data []byte) (*hook.PullRequestHook, error) {
gp := new(gitlabPRHook)
err := json.Unmarshal(data, gp)
if err != nil {
return nil, err
}
if gp.ObjectAttributes.Action != "" {
if gp.ObjectAttributes.Action != hook.ActionOpen && gp.ObjectAttributes.Action != hook.ActionUpdate {
return nil, fmt.Errorf("action is %v", gp.ObjectAttributes.Action)
}
}
return convertPullRequestHook(gp), nil
}
func parsePushHook(data []byte) (*hook.PushHook, error) {
gp := new(gitlabPushHook)
err := json.Unmarshal(data, gp)
if err != nil {
return nil, err
}
return convertPushHook(gp), nil
}
func convertPullRequestHook(gp *gitlabPRHook) *hook.PullRequestHook {
return &hook.PullRequestHook{
Action: gp.ObjectAttributes.Action,
Repo: hook.Repository{
Ref: gp.ObjectAttributes.SourceBranch,
Sha: gp.ObjectAttributes.LastCommit.Id,
CloneURL: gp.ObjectAttributes.Source.HttpUrl,
Branch: gp.ObjectAttributes.SourceBranch,
Description: gp.ObjectAttributes.Source.Description,
FullName: gp.ObjectAttributes.Source.PathWithNamespace,
GitHttpURL: gp.ObjectAttributes.Source.GitHttpUrl,
GitShhURL: gp.ObjectAttributes.Source.SshUrl,
GitURL: gp.ObjectAttributes.Source.Url,
HtmlURL: gp.ObjectAttributes.Source.WebUrl,
SshURL: gp.ObjectAttributes.Source.SshUrl,
Name: gp.ObjectAttributes.Source.Name,
URL: gp.ObjectAttributes.Source.Url,
Owner: gp.User.Username,
RepoType: "gitlab",
RepoOpenid: strconv.Itoa(gp.Project.Id),
},
TargetRepo: hook.Repository{
Ref: gp.ObjectAttributes.TargetBranch,
Sha: gp.ObjectAttributes.LastCommit.Id,
CloneURL: gp.ObjectAttributes.Target.HttpUrl,
Branch: gp.ObjectAttributes.TargetBranch,
Description: gp.ObjectAttributes.Target.Description,
FullName: gp.ObjectAttributes.Target.PathWithNamespace,
GitHttpURL: gp.ObjectAttributes.Target.GitHttpUrl,
GitShhURL: gp.ObjectAttributes.Target.SshUrl,
GitURL: gp.ObjectAttributes.Target.Url,
HtmlURL: gp.ObjectAttributes.Target.WebUrl,
SshURL: gp.ObjectAttributes.Target.SshUrl,
Name: gp.ObjectAttributes.Target.Name,
URL: gp.ObjectAttributes.Target.Url,
Owner: gp.User.Username,
RepoType: "gitlab",
RepoOpenid: strconv.Itoa(gp.Project.Id),
},
Sender: hook.User{
UserName: gp.User.Username,
},
PullRequest: hook.PullRequest{
Number: gp.ObjectAttributes.Iid,
Title: gp.ObjectAttributes.Title,
Base: hook.Reference{
Name: gp.ObjectAttributes.Source.Name,
Sha: gp.ObjectAttributes.LastCommit.Id,
},
Head: hook.Reference{
Name: gp.ObjectAttributes.Target.Name,
Sha: gp.ObjectAttributes.LastCommit.Id,
},
Author: hook.User{
UserName: gp.User.Username,
},
Created: time.Time{},
Updated: time.Time{},
},
}
}
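// convertPushHook maps a GitLab push payload onto the generic hook.PushHook;
// the branch name is taken as the third segment of the ref
// ("refs/heads/<branch>").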
func convertPushHook(gp *gitlabPushHook) *hook.PushHook {
branch := ""
if gp.Ref != "" {
if len(strings.Split(gp.Ref, "/")) > 2 {
branch = strings.Split(gp.Ref, "/")[2]
}
}
return &hook.PushHook{
Ref: gp.Ref,
Repo: hook.Repository{
Ref: gp.Ref,
Sha: gp.After,
CloneURL: gp.Project.HttpUrl,
Branch: branch,
Description: gp.Repository.Description,
FullName: gp.Project.PathWithNamespace,
GitHttpURL: gp.Repository.GitHttpUrl,
GitShhURL: gp.Repository.GitSshUrl,
GitURL: gp.Repository.Url,
SshURL: gp.Project.SshUrl,
Name: gp.Repository.Name,
URL: gp.Repository.Url,
Owner: gp.UserUsername,
RepoType: "gitlab",
RepoOpenid: strconv.Itoa(gp.ProjectId),
},
Before: gp.Before,
After: gp.After,
Sender: hook.User{
UserName: gp.UserUsername,
},
}
}
func convertCommentHook(gp *gitlabCommentHook) (*hook.PullRequestCommentHook, error) {
return &hook.PullRequestCommentHook{
Action: hook.EVENTS_TYPE_COMMENT,
Repo: hook.Repository{
Ref: gp.MergeRequest.SourceBranch,
Sha: gp.MergeRequest.LastCommit.Id,
CloneURL: gp.MergeRequest.Source.HttpUrl,
Branch: gp.MergeRequest.SourceBranch,
Description: gp.MergeRequest.Source.Description,
FullName: gp.MergeRequest.Source.PathWithNamespace,
GitHttpURL: gp.MergeRequest.Source.GitHttpUrl,
GitShhURL: gp.MergeRequest.Source.SshUrl,
GitURL: gp.MergeRequest.Source.Url,
HtmlURL: gp.MergeRequest.Source.WebUrl,
SshURL: gp.MergeRequest.Source.SshUrl,
Name: gp.MergeRequest.Source.Name,
URL: gp.MergeRequest.Source.Url,
Owner: gp.User.Username,
RepoType: "gitlab",
RepoOpenid: strconv.Itoa(gp.Project.Id),
},
TargetRepo: hook.Repository{
Ref: gp.MergeRequest.TargetBranch,
Sha: gp.MergeRequest.LastCommit.Id,
CloneURL: gp.MergeRequest.Target.HttpUrl,
Branch: gp.MergeRequest.TargetBranch,
Description: gp.MergeRequest.Target.Description,
FullName: gp.MergeRequest.Target.PathWithNamespace,
GitHttpURL: gp.MergeRequest.Target.GitHttpUrl,
GitShhURL: gp.MergeRequest.Target.SshUrl,
GitURL: gp.MergeRequest.Target.Url,
HtmlURL: gp.MergeRequest.Target.WebUrl,
SshURL: gp.MergeRequest.Target.SshUrl,
Name: gp.MergeRequest.Target.Name,
URL: gp.MergeRequest.Target.Url,
Owner: gp.User.Username,
RepoType: "gitlab",
RepoOpenid: strconv.Itoa(gp.Project.Id),
},
Comment: hook.Comment{
Body: gp.ObjectAttributes.Note,
Author: hook.User{
UserName: gp.User.Username,
},
},
PullRequest: hook.PullRequest{
Number: gp.MergeRequest.Iid,
Title: gp.MergeRequest.Title,
Base: hook.Reference{
Name: gp.MergeRequest.Source.Name,
Sha: gp.MergeRequest.LastCommit.Id,
},
Head: hook.Reference{
Name: gp.MergeRequest.Target.Name,
Sha: gp.MergeRequest.LastCommit.Id,
},
Author: hook.User{
UserName: gp.User.Username,
},
Created: time.Time{},
Updated: time.Time{},
},
Sender: hook.User{
UserName: gp.User.Username,
},
}, nil
}
type gitlabPushHook struct {
ObjectKind string `json:"object_kind"`
EventName string `json:"event_name"`
Before string `json:"before"`
After string `json:"after"`
Ref string `json:"ref"`
CheckoutSha string `json:"checkout_sha"`
Message interface{} `json:"message"`
UserId int `json:"user_id"`
UserName string `json:"user_name"`
UserUsername string `json:"user_username"`
UserEmail string `json:"user_email"`
UserAvatar string `json:"user_avatar"`
ProjectId int `json:"project_id"`
Project struct {
Id int `json:"id"`
Name string `json:"name"`
Description string `json:"description"`
WebUrl string `json:"web_url"`
AvatarUrl interface{} `json:"avatar_url"`
GitSshUrl string `json:"git_ssh_url"`
GitHttpUrl string `json:"git_http_url"`
Namespace string `json:"namespace"`
VisibilityLevel int `json:"visibility_level"`
PathWithNamespace string `json:"path_with_namespace"`
DefaultBranch string `json:"default_branch"`
CiConfigPath string `json:"ci_config_path"`
Homepage string `json:"homepage"`
Url string `json:"url"`
SshUrl string `json:"ssh_url"`
HttpUrl string `json:"http_url"`
} `json:"project"`
Commits []struct {
Id string `json:"id"`
Message string `json:"message"`
Title string `json:"title"`
Timestamp time.Time `json:"timestamp"`
Url string `json:"url"`
Author struct {
Name string `json:"name"`
Email string `json:"email"`
} `json:"author"`
Added []interface{} `json:"added"`
Modified []string `json:"modified"`
Removed []interface{} `json:"removed"`
} `json:"commits"`
TotalCommitsCount int `json:"total_commits_count"`
PushOptions struct {
} `json:"push_options"`
Repository struct {
Name string `json:"name"`
Url string `json:"url"`
Description string `json:"description"`
Homepage string `json:"homepage"`
GitHttpUrl string `json:"git_http_url"`
GitSshUrl string `json:"git_ssh_url"`
VisibilityLevel int `json:"visibility_level"`
} `json:"repository"`
}
type gitlabPRHook struct {
ObjectKind string `json:"object_kind"`
EventType string `json:"event_type"`
User struct {
Id int `json:"id"`
Name string `json:"name"`
Username string `json:"username"`
AvatarUrl string `json:"avatar_url"`
Email string `json:"email"`
} `json:"user"`
Project struct {
Id int `json:"id"`
Name string `json:"name"`
Description string `json:"description"`
WebUrl string `json:"web_url"`
AvatarUrl interface{} `json:"avatar_url"`
GitSshUrl string `json:"git_ssh_url"`
GitHttpUrl string `json:"git_http_url"`
Namespace string `json:"namespace"`
VisibilityLevel int `json:"visibility_level"`
PathWithNamespace string `json:"path_with_namespace"`
DefaultBranch string `json:"default_branch"`
CiConfigPath string `json:"ci_config_path"`
Homepage string `json:"homepage"`
Url string `json:"url"`
SshUrl string `json:"ssh_url"`
HttpUrl string `json:"http_url"`
} `json:"project"`
ObjectAttributes struct {
AssigneeId interface{} `json:"assignee_id"`
AuthorId int `json:"author_id"`
CreatedAt string `json:"created_at"`
Description string `json:"description"`
HeadPipelineId interface{} `json:"head_pipeline_id"`
Id int `json:"id"`
Iid int64 `json:"iid"`
LastEditedAt interface{} `json:"last_edited_at"`
LastEditedById interface{} `json:"last_edited_by_id"`
MergeCommitSha interface{} `json:"merge_commit_sha"`
MergeError interface{} `json:"merge_error"`
MergeParams struct {
ForceRemoveSourceBranch string `json:"force_remove_source_branch"`
} `json:"merge_params"`
MergeStatus string `json:"merge_status"`
MergeUserId interface{} `json:"merge_user_id"`
MergeWhenPipelineSucceeds bool `json:"merge_when_pipeline_succeeds"`
MilestoneId interface{} `json:"milestone_id"`
SourceBranch string `json:"source_branch"`
SourceProjectId int `json:"source_project_id"`
StateId int `json:"state_id"`
TargetBranch string `json:"target_branch"`
TargetProjectId int `json:"target_project_id"`
TimeEstimate int `json:"time_estimate"`
Title string `json:"title"`
UpdatedAt string `json:"updated_at"`
UpdatedById interface{} `json:"updated_by_id"`
Url string `json:"url"`
Source struct {
Id int `json:"id"`
Name string `json:"name"`
Description string `json:"description"`
WebUrl string `json:"web_url"`
AvatarUrl interface{} `json:"avatar_url"`
GitSshUrl string `json:"git_ssh_url"`
GitHttpUrl string `json:"git_http_url"`
Namespace string `json:"namespace"`
VisibilityLevel int `json:"visibility_level"`
PathWithNamespace string `json:"path_with_namespace"`
DefaultBranch string `json:"default_branch"`
CiConfigPath string `json:"ci_config_path"`
Homepage string `json:"homepage"`
Url string `json:"url"`
SshUrl string `json:"ssh_url"`
HttpUrl string `json:"http_url"`
} `json:"source"`
Target struct {
Id int `json:"id"`
Name string `json:"name"`
Description string `json:"description"`
WebUrl string `json:"web_url"`
AvatarUrl interface{} `json:"avatar_url"`
GitSshUrl string `json:"git_ssh_url"`
GitHttpUrl string `json:"git_http_url"`
Namespace string `json:"namespace"`
VisibilityLevel int `json:"visibility_level"`
PathWithNamespace string `json:"path_with_namespace"`
DefaultBranch string `json:"default_branch"`
CiConfigPath string `json:"ci_config_path"`
Homepage string `json:"homepage"`
Url string `json:"url"`
SshUrl string `json:"ssh_url"`
HttpUrl string `json:"http_url"`
} `json:"target"`
LastCommit struct {
Id string `json:"id"`
Message string `json:"message"`
Title string `json:"title"`
Timestamp time.Time `json:"timestamp"`
Url string `json:"url"`
Author struct {
Name string `json:"name"`
Email string `json:"email"`
} `json:"author"`
} `json:"last_commit"`
WorkInProgress bool `json:"work_in_progress"`
TotalTimeSpent int `json:"total_time_spent"`
TimeChange int `json:"time_change"`
HumanTotalTimeSpent interface{} `json:"human_total_time_spent"`
HumanTimeChange interface{} `json:"human_time_change"`
HumanTimeEstimate interface{} `json:"human_time_estimate"`
AssigneeIds []interface{} `json:"assignee_ids"`
State string `json:"state"`
Action string `json:"action"`
} `json:"object_attributes"`
Labels []interface{} `json:"labels"`
Changes struct {
MergeStatus struct {
Previous string `json:"previous"`
Current string `json:"current"`
} `json:"merge_status"`
} `json:"changes"`
Repository struct {
Name string `json:"name"`
Url string `json:"url"`
Description string `json:"description"`
Homepage string `json:"homepage"`
} `json:"repository"`
}
type gitlabCommentHook struct {
ObjectKind string `json:"object_kind"`
EventType string `json:"event_type"`
User struct {
Id int `json:"id"`
Name string `json:"name"`
Username string `json:"username"`
AvatarUrl string `json:"avatar_url"`
Email string `json:"email"`
} `json:"user"`
ProjectId int `json:"project_id"`
Project struct {
Id int `json:"id"`
Name string `json:"name"`
Description string `json:"description"`
WebUrl string `json:"web_url"`
AvatarUrl interface{} `json:"avatar_url"`
GitSshUrl string `json:"git_ssh_url"`
GitHttpUrl string `json:"git_http_url"`
Namespace string `json:"namespace"`
VisibilityLevel int `json:"visibility_level"`
PathWithNamespace string `json:"path_with_namespace"`
DefaultBranch string `json:"default_branch"`
CiConfigPath string `json:"ci_config_path"`
Homepage string `json:"homepage"`
Url string `json:"url"`
SshUrl string `json:"ssh_url"`
HttpUrl string `json:"http_url"`
} `json:"project"`
ObjectAttributes struct {
Attachment interface{} `json:"attachment"`
AuthorId int `json:"author_id"`
ChangePosition interface{} `json:"change_position"`
CommitId interface{} `json:"commit_id"`
CreatedAt string `json:"created_at"`
DiscussionId string `json:"discussion_id"`
Id int `json:"id"`
LineCode interface{} `json:"line_code"`
Note string `json:"note"`
NoteableId int `json:"noteable_id"`
NoteableType string `json:"noteable_type"`
OriginalPosition interface{} `json:"original_position"`
Position interface{} `json:"position"`
ProjectId int `json:"project_id"`
ResolvedAt interface{} `json:"resolved_at"`
ResolvedById interface{} `json:"resolved_by_id"`
ResolvedByPush interface{} `json:"resolved_by_push"`
StDiff interface{} `json:"st_diff"`
System bool `json:"system"`
Type interface{} `json:"type"`
UpdatedAt string `json:"updated_at"`
UpdatedById interface{} `json:"updated_by_id"`
Description string `json:"description"`
Url string `json:"url"`
} `json:"object_attributes"`
Repository struct {
Name string `json:"name"`
Url string `json:"url"`
Description string `json:"description"`
Homepage string `json:"homepage"`
} `json:"repository"`
MergeRequest struct {
AssigneeId interface{} `json:"assignee_id"`
AuthorId int `json:"author_id"`
CreatedAt string `json:"created_at"`
Description string `json:"description"`
HeadPipelineId interface{} `json:"head_pipeline_id"`
Id int `json:"id"`
Iid int64 `json:"iid"`
LastEditedAt interface{} `json:"last_edited_at"`
LastEditedById interface{} `json:"last_edited_by_id"`
MergeCommitSha interface{} `json:"merge_commit_sha"`
MergeError interface{} `json:"merge_error"`
MergeParams struct {
ForceRemoveSourceBranch string `json:"force_remove_source_branch"`
} `json:"merge_params"`
MergeStatus string `json:"merge_status"`
MergeUserId interface{} `json:"merge_user_id"`
MergeWhenPipelineSucceeds bool `json:"merge_when_pipeline_succeeds"`
MilestoneId interface{} `json:"milestone_id"`
SourceBranch string `json:"source_branch"`
SourceProjectId int `json:"source_project_id"`
StateId int `json:"state_id"`
TargetBranch string `json:"target_branch"`
TargetProjectId int `json:"target_project_id"`
TimeEstimate int `json:"time_estimate"`
Title string `json:"title"`
UpdatedAt string `json:"updated_at"`
UpdatedById interface{} `json:"updated_by_id"`
Url string `json:"url"`
Source struct {
Id int `json:"id"`
Name string `json:"name"`
Description string `json:"description"`
WebUrl string `json:"web_url"`
AvatarUrl interface{} `json:"avatar_url"`
GitSshUrl string `json:"git_ssh_url"`
GitHttpUrl string `json:"git_http_url"`
Namespace string `json:"namespace"`
VisibilityLevel int `json:"visibility_level"`
PathWithNamespace string `json:"path_with_namespace"`
DefaultBranch string `json:"default_branch"`
CiConfigPath string `json:"ci_config_path"`
Homepage string `json:"homepage"`
Url string `json:"url"`
SshUrl string `json:"ssh_url"`
HttpUrl string `json:"http_url"`
} `json:"source"`
Target struct {
Id int `json:"id"`
Name string `json:"name"`
Description string `json:"description"`
WebUrl string `json:"web_url"`
AvatarUrl interface{} `json:"avatar_url"`
GitSshUrl string `json:"git_ssh_url"`
GitHttpUrl string `json:"git_http_url"`
Namespace string `json:"namespace"`
VisibilityLevel int `json:"visibility_level"`
PathWithNamespace string `json:"path_with_namespace"`
DefaultBranch string `json:"default_branch"`
CiConfigPath string `json:"ci_config_path"`
Homepage string `json:"homepage"`
Url string `json:"url"`
SshUrl string `json:"ssh_url"`
HttpUrl string `json:"http_url"`
} `json:"target"`
LastCommit struct {
Id string `json:"id"`
Message string `json:"message"`
Title string `json:"title"`
Timestamp time.Time `json:"timestamp"`
Url string `json:"url"`
Author struct {
Name string `json:"name"`
Email string `json:"email"`
} `json:"author"`
} `json:"last_commit"`
WorkInProgress bool `json:"work_in_progress"`
TotalTimeSpent int `json:"total_time_spent"`
TimeChange int `json:"time_change"`
HumanTotalTimeSpent interface{} `json:"human_total_time_spent"`
HumanTimeChange interface{} `json:"human_time_change"`
HumanTimeEstimate interface{} `json:"human_time_estimate"`
AssigneeIds []interface{} `json:"assignee_ids"`
State string `json:"state"`
} `json:"merge_request"`
}

22
hook/pr.go Normal file
View File

@ -0,0 +1,22 @@
package hook
import "time"
type (
// PullRequest represents a repository pull request.
PullRequest struct {
Number int64
Title string
Body string
Base Reference
Head Reference
Author User
Created time.Time
Updated time.Time
}
Comment struct {
Body string
Author User
}
)

27
hook/repo.go Normal file
View File

@ -0,0 +1,27 @@
package hook
import "time"
type Repository struct {
Id string `json:"id"`
Ref string `json:"ref"`
Sha string `json:"sha"`
CloneURL string `json:"cloneURL"`
CreatedAt time.Time `json:"createdAt"`
Branch string `json:"branch"`
Description string `json:"description"`
FullName string `json:"fullName"`
GitHttpURL string `json:"gitHttpURL"`
GitShhURL string `json:"gitSshURL"`
GitSvnURL string `json:"gitSvnURL"`
GitURL string `json:"gitURL"`
HtmlURL string `json:"htmlURL"`
SshURL string `json:"sshURL"`
SvnURL string `json:"svnURL"`
Name string `json:"name"`
Private bool `json:"private"`
URL string `json:"url"`
Owner string `json:"owner"`
RepoType string `json:"repoType"`
RepoOpenid string `json:"repoOpenid"`
}

55
hook/webhook.go Normal file
View File

@ -0,0 +1,55 @@
package hook
import (
"net/http"
)
type (
WebHook interface {
Repository() Repository
}
WebhookService interface {
Parse(req *http.Request, fn SecretFunc) (WebHook, error)
}
PushHook struct {
Ref string
Repo Repository
Before string
After string
Commit Commit
Sender User
Commits []Commit
}
// BranchHook represents a branch or tag event,
// eg create and delete github event types.
BranchHook struct {
Ref Reference
Repo Repository
Sender User
}
PullRequestHook struct {
Action string
Repo Repository
TargetRepo Repository
PullRequest PullRequest
Sender User
}
PullRequestCommentHook struct {
Action string
Repo Repository
TargetRepo Repository
PullRequest PullRequest
Comment Comment
Sender User
}
SecretFunc func(webhook WebHook) (string, error)
)
func (h *PushHook) Repository() Repository { return h.Repo }
func (h *BranchHook) Repository() Repository { return h.Repo }
func (h *PullRequestHook) Repository() Repository { return h.Repo }
func (h *PullRequestCommentHook) Repository() Repository { return h.Repo }
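
The hook package above defines the parsing contract, but no concrete WebhookService implementation appears in this part of the diff. As a rough illustration only, the sketch below shows how such a service might be consumed from an HTTP handler; the parser and lookupSecret values and the module path github.com/gokins-main/gokins/hook are assumptions, not part of the commit.

package main

import (
	"fmt"
	"net/http"

	"github.com/gokins-main/gokins/hook" // assumed module path
)

// webhookHandler wires a (hypothetical) WebhookService into net/http and
// dispatches on the concrete hook type returned by Parse.
func webhookHandler(parser hook.WebhookService, lookupSecret hook.SecretFunc) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		wh, err := parser.Parse(r, lookupSecret)
		if err != nil {
			http.Error(w, err.Error(), http.StatusBadRequest)
			return
		}
		switch h := wh.(type) {
		case *hook.PushHook:
			fmt.Printf("push %s on %s\n", h.Ref, h.Repo.Name)
		case *hook.PullRequestHook:
			fmt.Printf("pull request #%d: %s\n", h.PullRequest.Number, h.PullRequest.Title)
		default:
			fmt.Printf("event on repository %s\n", wh.Repository().Name)
		}
		w.WriteHeader(http.StatusOK)
	}
}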

99
main.go
View File

@ -1,103 +1,10 @@
package main
import (
"flag"
"gokins/comm"
"gokins/core"
"gokins/mgr"
"gokins/route"
"gokins/service"
"gokins/service/dbService"
"os"
"path/filepath"
ruisIo "github.com/mgr9525/go-ruisutil/ruisio"
"github.com/gin-gonic/gin"
"github.com/gokins-main/gokins/cmd"
//_ "github.com/mattn/go-sqlite3"
)
var (
clearPass = ""
mvData = ""
upgrade = false
)
func init() {
path, err := os.Executable()
if err != nil {
println("path err:" + err.Error())
return
}
println("path:" + path)
comm.Path = path
dir, err := filepath.Abs(filepath.Dir(path))
if err != nil {
println("dir err:" + err.Error())
return
}
println("dir:" + dir)
flag.StringVar(&comm.Dir, "d", dir, "working directory")
flag.StringVar(&comm.Host, "bind", ":8030", "bind address")
flag.BoolVar(&comm.NoUppass, "nps", false, "disallow password changes")
flag.IntVar(&comm.RunTaskLen, "rln", 5, "number of pipelines run concurrently")
flag.StringVar(&clearPass, "clp", "", "clear a user's password (stop the service before running)")
flag.StringVar(&mvData, "mvdata", "", "migrate data from another database into the local one (run logs are not migrated yet)")
flag.BoolVar(&upgrade, "up", false, "upgrade the database (migrates pipelines and triggers)")
flag.Parse()
comm.Gin = gin.Default()
}
func main() {
if upgrade {
os.RemoveAll(comm.Dir + "/data/logs")
if !ruisIo.PathExists(comm.Dir + "/db.dat") {
println("未找到原有数据库")
return
}
os.Remove(comm.Dir + "/db.bak")
os.Rename(comm.Dir+"/db.dat", comm.Dir+"/db.bak")
}
if !ruisIo.PathExists(comm.Dir + "/data") {
err := os.MkdirAll(comm.Dir+"/data", 0755)
if err != nil {
println("Mkdir data err:" + err.Error())
return
}
}
err := comm.InitDb()
if err != nil {
println("InitDb err:" + err.Error())
return
}
if clearPass != "" {
service.ClearUPass(clearPass)
return
}
if mvData != "" {
service.MoveData(mvData)
return
}
if upgrade {
service.Upgrade()
return
}
runWeb()
}
func runWeb() {
jwtKey := dbService.GetParam("jwt-key")
jkey := jwtKey.GetString("key")
if jkey == "" {
jkey = core.RandomString(32)
jwtKey.Set("key", jkey)
dbService.SetParam("jwt-key", jwtKey)
}
core.JwtKey = jkey
route.Init()
mgr.ExecMgr.Start()
mgr.TriggerMgr.Start()
err := comm.Gin.Run(comm.Host)
if err != nil {
println("gin run err:" + err.Error())
}
mgr.Cancel()
cmd.Run()
}

View File

@ -1,89 +0,0 @@
package mgr
import (
"gokins/comm"
"gokins/model"
"sync"
"time"
ruisUtil "github.com/mgr9525/go-ruisutil"
)
var ExecMgr = &execManager{}
type execManager struct {
tm time.Time
lk sync.Mutex
tasks map[int]*RunTask
}
func (c *execManager) Start() {
c.tasks = make(map[int]*RunTask)
go func() {
for {
select {
case <-mgrCtx.Done():
goto end
default:
c.run()
time.Sleep(time.Second)
}
}
end:
println("ctx end!")
}()
}
func (c *execManager) run() {
defer ruisUtil.Recovers("run", func(errs string) {
println("execManager run err:" + errs)
})
if time.Since(c.tm).Seconds() < 20 {
return
}
c.tm = time.Now()
c.lk.Lock()
defer c.lk.Unlock()
for k, v := range c.tasks {
if v.cncl == nil {
delete(c.tasks, k)
}
}
if len(c.tasks) >= comm.RunTaskLen {
return
}
var ls []*model.TModelRun
err := comm.Db.Where("state=0 or state=1").Find(&ls)
if err != nil {
println("execManager err:" + err.Error())
return
}
for _, v := range ls {
if v.State == 0 {
v.State = 1
comm.Db.Cols("state").Where("id=?", v.Id).Update(v)
}
_, ok := c.tasks[v.Id]
if !ok {
e := &RunTask{Mr: v}
c.tasks[v.Id] = e
e.start()
}
}
}
func (c *execManager) Refresh() {
c.tm = time.Time{}
}
func (c *execManager) StopTask(id int) {
c.lk.Lock()
defer c.lk.Unlock()
e, ok := c.tasks[id]
if ok {
e.stop()
}
//v := &model.TModelRun{}
//v.State = -1
//_, err := comm.Db.Cols("state").Where("id=?", v.Id).Update(v)
//return err
}

View File

@ -1,12 +0,0 @@
package mgr
import "context"
var (
mgrCtx context.Context
Cancel context.CancelFunc
)
func init() {
mgrCtx, Cancel = context.WithCancel(context.Background())
}

View File

@ -1,246 +0,0 @@
package mgr
import (
"bytes"
"context"
"errors"
"fmt"
"gokins/comm"
"gokins/model"
"gokins/service/dbService"
"os"
"os/exec"
"path/filepath"
"runtime"
"strings"
"time"
ruisIo "github.com/mgr9525/go-ruisutil/ruisio"
ruisUtil "github.com/mgr9525/go-ruisutil"
)
type RunTask struct {
Md *model.TModel
Mr *model.TModelRun
plugs []*model.TPlugin
ctx context.Context
cncl context.CancelFunc
}
func (c *RunTask) start() {
if c.Mr == nil || c.cncl != nil {
return
}
c.Md = dbService.GetModel(c.Mr.Tid)
if c.Md == nil {
return
}
c.ctx, c.cncl = context.WithCancel(mgrCtx)
go func() {
c.run()
c.cncl = nil
}()
}
func (c *RunTask) run() {
defer ruisUtil.Recovers("RunTask start", nil)
c.plugs = nil
if c.Md.Wrkdir != "" {
if ruisIo.PathExists(c.Md.Wrkdir) {
if c.Md.Clrdir == 1 {
err := rmDirFiles(c.Md.Wrkdir)
if err != nil {
c.end(2, "工作目录创建失败:"+err.Error())
return
}
}
} else {
/*if c.Md.Clrdir != 1 {
c.end(2, "工作目录不存在")
return
}*/
err := os.MkdirAll(c.Md.Wrkdir, 0755)
if err != nil {
c.end(2, "工作目录创建失败:"+err.Error())
return
}
}
}
err := comm.Db.Where("del!='1' and tid=?", c.Mr.Tid).OrderBy("sort ASC,id ASC").Find(&c.plugs)
if err != nil {
c.end(2, "db err:"+err.Error())
return
}
if len(c.plugs) <= 0 {
c.end(2, "无插件")
return
}
for _, v := range c.plugs {
select {
case <-c.ctx.Done():
c.end(-1, "手动停止")
return
default:
rn, err := c.runs(v)
if rn != nil {
rn.Timesd = time.Now()
_, errs := comm.Db.Cols("state", "excode", "timesd").Where("id=?", rn.Id).Update(rn)
if err == nil && errs != nil {
err = errs
}
}
if err != nil {
println("cmd run err:", err.Error())
c.end(2, err.Error())
return
}
time.Sleep(time.Second)
}
}
c.end(4, "")
}
func writeEnvs(buf *bytes.Buffer, line string) {
if strings.Index(line, "=") <= 0 {
return
}
vs := strings.ReplaceAll(line, "\t", ``)
vs = strings.ReplaceAll(vs, "\n", ` `)
vs = strings.ReplaceAll(vs, `"`, `\"`)
vs = strings.ReplaceAll(vs, `'`, `\'`)
buf.WriteString("\n")
buf.WriteString(fmt.Sprintf(`export %s`, vs))
buf.WriteString("\n")
}
func (c *RunTask) runs(pgn *model.TPlugin) (rns *model.TPluginRun, rterr error) {
defer ruisUtil.Recovers("RunTask.run", func(errs string) {
rterr = errors.New(errs)
})
rn := dbService.FindPluginRun(c.Mr.Tid, c.Mr.Id, pgn.Id)
if rn == nil {
rn = &model.TPluginRun{Mid: c.Mr.Tid, Tid: c.Mr.Id, Pid: pgn.Id}
rn.Times = time.Now()
rn.State = 1
_, err := comm.Db.Insert(rn)
if err != nil {
return nil, err
}
} else if rn.State != 0 {
rn.State = 2
return rn, errors.New("already run")
}
select {
case <-c.ctx.Done():
rn.State = -1
return rn, nil
default:
}
logpth := fmt.Sprintf("%s/data/logs/%d/%d.log", comm.Dir, rn.Tid, rn.Id)
if !ruisIo.PathExists(filepath.Dir(logpth)) {
err := os.MkdirAll(filepath.Dir(logpth), 0755)
if err != nil {
println("MkdirAll err:" + err.Error())
rn.State = 2
return rn, err
}
}
logfl, err := os.OpenFile(logpth, os.O_CREATE|os.O_WRONLY, 0644)
if err != nil {
println("MkdirAll err:" + err.Error())
rn.State = 2
return rn, err
}
defer logfl.Close()
buf := &bytes.Buffer{}
if c.Md.Envs != "" {
str := strings.ReplaceAll(c.Md.Envs, "\t", "")
envs := strings.Split(str, "\n")
for _, s := range envs {
writeEnvs(buf, s)
}
}
writeEnvs(buf, "WORKDIR="+c.Md.Wrkdir)
buf.WriteString("\n")
buf.WriteString(pgn.Cont)
buf.WriteString("\n")
name := "sh"
par0 := "-c"
if runtime.GOOS == "windows" {
name = "cmd"
par0 = "/c"
}
cmd := exec.CommandContext(c.ctx, name, par0, buf.String())
cmd.Stdout = logfl
cmd.Stderr = logfl
if c.Md.Wrkdir != "" {
cmd.Dir = c.Md.Wrkdir
} else if comm.Dir != "" {
cmd.Dir = comm.Dir
}
err = cmd.Run()
rn.State = 4
if err != nil {
println("cmd.run err:" + err.Error())
// rn.State = 2
// return rn, err
}
fmt.Println(fmt.Sprintf("cmdRun(%s)dir:%s", pgn.Title, cmd.Dir))
if cmd.ProcessState != nil {
rn.Excode = cmd.ProcessState.ExitCode()
}
if pgn.Exend == 1 && (err != nil || rn.Excode != 0) {
rn.State = 2
return rn, fmt.Errorf("程序执行错误(exit:%d)%+v", rn.Excode, err)
}
return rn, nil
}
func (c *RunTask) end(stat int, errs string) {
defer c.stop()
c.Mr.State = stat
c.Mr.Errs = errs
c.Mr.Timesd = time.Now()
_, err := comm.Db.Cols("state", "errs", "timesd").Where("id=?", c.Mr.Id).Update(c.Mr)
if err != nil {
println("db err:", err.Error())
return
}
var ls []*model.TTrigger
err = comm.Db.Where("del!=1 and enable=1 and meid=?", c.Md.Id).Find(&ls)
if err != nil {
println("db err:", err.Error())
return
}
for _, v := range ls {
TriggerMgr.StartOne(v, c.Md, c.Mr)
}
}
func (c *RunTask) stop() {
if c.cncl != nil {
c.cncl()
c.cncl = nil
}
}
func rmDirFiles(dir string) error {
d, err := os.Open(dir)
if err != nil {
return err
}
defer d.Close()
names, err := d.Readdirnames(0)
if err != nil {
return err
}
for _, name := range names {
err = os.RemoveAll(filepath.Join(dir, name))
if err != nil {
return err
}
}
return nil
}

View File

@ -1,122 +0,0 @@
package mgr
import (
"gokins/comm"
"gokins/model"
"sync"
"time"
ruisUtil "github.com/mgr9525/go-ruisutil"
)
var TriggerMgr = &triggerManager{}
type triggerManager struct {
tmChk time.Time
tmRfs time.Time
lk sync.Mutex
tasks map[int]iTrigger
}
type iTrigger interface {
start(pars ...interface{}) error
stop()
isRun() bool
}
func (c *triggerManager) Start() {
c.tasks = make(map[int]iTrigger)
go func() {
for {
select {
case <-mgrCtx.Done():
goto end
default:
c.runChk()
c.runRfs()
time.Sleep(time.Second)
}
}
end:
println("ctx end!")
}()
}
func (c *triggerManager) runChk() {
defer ruisUtil.Recovers("triggerManager runChk", nil)
if time.Since(c.tmChk).Seconds() < 30 {
return
}
c.tmChk = time.Now()
c.lk.Lock()
defer c.lk.Unlock()
for k, v := range c.tasks {
if !v.isRun() {
delete(c.tasks, k)
}
}
}
func (c *triggerManager) runRfs() {
defer ruisUtil.Recovers("run", func(errs string) {
println("triggerManager run err:" + errs)
})
if time.Since(c.tmRfs).Minutes() < 30 {
return
}
c.tmRfs = time.Now()
var ls []*model.TTrigger
// currently only timer triggers need to be started automatically
err := comm.Db.Where("del!=1 and enable=1").And("types='timer'").Find(&ls)
if err != nil {
println("triggerManager err:" + err.Error())
return
}
for _, v := range ls {
c.lk.Lock()
_, ok := c.tasks[v.Id]
c.lk.Unlock()
if !ok {
c.StartOne(v)
}
}
}
func (c *triggerManager) Refresh(id int) {
c.tmRfs = time.Time{}
c.lk.Lock()
defer c.lk.Unlock()
v, ok := c.tasks[id]
if ok {
v.stop()
delete(c.tasks, id)
}
}
func (c *triggerManager) StartOne(trg *model.TTrigger, pars ...interface{}) {
defer ruisUtil.Recovers("StartOne", nil)
if trg.Del == 1 || trg.Enable != 1 {
return
}
var i iTrigger
switch trg.Types {
case "timer":
i = &trigTimeTask{tg: trg}
case "hook":
i = &trigHookTask{tg: trg}
case "worked":
i = &trigWorkedTask{tg: trg}
}
if i == nil {
return
}
errs := i.start(pars...)
trg.Errs = ""
if errs != nil {
trg.Errs = errs.Error()
println("trigTimeTask start err:" + trg.Errs)
} else {
c.lk.Lock()
c.tasks[trg.Id] = i
c.lk.Unlock()
}
comm.Db.Cols("errs").Where("id=?", trg.Id).Update(trg)
}

View File

@ -1,209 +0,0 @@
package mgr
import (
"context"
"crypto/hmac"
"crypto/sha1"
"encoding/hex"
"encoding/json"
"errors"
"fmt"
"gokins/comm"
"gokins/model"
"gokins/service/dbService"
"net/http"
"net/url"
"strings"
"time"
"github.com/dop251/goja"
ruisUtil "github.com/mgr9525/go-ruisutil"
)
type confHookBean struct {
Plug string `json:"plug"`
Conf string `json:"conf"`
}
type trigHookTask struct {
md *model.TModel
tg *model.TTrigger
conf *confHookBean
ctx context.Context
cncl context.CancelFunc
js string
vm *goja.Runtime
querys string
header http.Header
bodys []byte
body *ruisUtil.Map
confs *ruisUtil.Map
}
func (c *trigHookTask) stop() {
if c.cncl != nil {
c.cncl()
c.cncl = nil
}
}
func (c *trigHookTask) isRun() bool {
return c.cncl != nil
}
func (c *trigHookTask) start(pars ...interface{}) error {
if c.tg == nil || c.cncl != nil {
return errors.New("already run")
}
if len(pars) < 3 {
return errors.New("param err")
}
c.querys = pars[0].(string)
c.header = pars[1].(http.Header)
c.bodys = pars[2].([]byte)
c.conf = &confHookBean{}
err := json.Unmarshal([]byte(c.tg.Config), c.conf)
if err != nil {
return err
}
c.confs = ruisUtil.NewMapo(c.conf.Conf)
c.md = dbService.GetModel(c.tg.Mid)
if c.md == nil {
return errors.New("not found model")
}
js, ok := HookjsMap[c.conf.Plug]
if !ok {
return errors.New("not found plugin:" + c.conf.Plug)
}
c.js = js.js
c.ctx, c.cncl = context.WithCancel(mgrCtx)
go func() {
defer ruisUtil.Recovers("gorun", nil)
c.vm = goja.New()
c.tg.Errs = ""
err := c.run()
if err != nil {
c.tg.Errs = err.Error()
}
comm.Db.Cols("errs").Where("id=?", c.tg.Id).Update(c.tg)
c.stop()
}()
go func() {
for {
select {
case <-c.ctx.Done():
if c.vm != nil {
c.vm.ClearInterrupt()
}
goto end
default:
time.Sleep(time.Millisecond)
}
}
end:
println("ctx end")
}()
return nil
}
func (c *trigHookTask) run() (rterr error) {
defer ruisUtil.Recovers("RunTask start", func(errs string) {
println("trigHookTask run err:" + errs)
rterr = errors.New(errs)
})
c.initVm()
rslt, err := c.vm.RunString(c.js)
if err != nil {
println("vm.RunString err:" + err.Error())
return err
}
println(fmt.Sprintf("js result:%+v", rslt.Export()))
mainFun, ok := goja.AssertFunction(c.vm.Get("main"))
if !ok {
println("not found main err")
return errors.New("not found main err")
}
ret, err := mainFun(goja.Null())
if err != nil {
println("vm mainFun err:" + err.Error())
return err
}
rets := ruisUtil.NewMapo(ret.Export())
fmt.Printf("rets:%+v\n", rets)
if rets.GetBool("check") {
rn := &model.TModelRun{}
rn.Tid = c.md.Id
rn.Uid = c.tg.Uid
rn.Times = time.Now()
rn.Tgid = c.tg.Id
rn.Tgtyps = c.conf.Plug + " trigger"
comm.Db.Insert(rn)
ExecMgr.Refresh()
println("trigHookTask model run id:", rn.Id)
} else {
errs := rets.GetString("errs")
if errs != "" {
return errors.New(errs)
}
}
return nil
}
func (c *trigHookTask) initVm() {
csl := c.vm.NewObject()
c.vm.Set("console", csl)
csl.Set("log", func(args ...interface{}) {
fmt.Println(args)
})
c.vm.Set("getHeader", func(key string) string {
return c.header.Get(key)
})
c.vm.Set("getBodys", func(key string) string {
return string(c.bodys)
})
c.vm.Set("getBody", func() interface{} {
if c.body == nil {
if c.conf.Plug == "github" {
unescape, _ := url.QueryUnescape(string(c.bodys)[8:])
c.bodys = []byte(unescape)
}
c.body = ruisUtil.NewMapo(c.bodys)
}
return c.body
})
c.vm.Set("getConf", func() interface{} {
return c.confs
})
c.vm.Set("encodeSha1", func(body string) string {
return ruisUtil.Sha1String(body)
})
c.vm.Set("verifySignature", func(secret string, signature string, body []byte) bool {
return verifySignature([]byte(secret), signature, body)
})
}
func signBody(secret, body []byte) []byte {
computed := hmac.New(sha1.New, secret)
computed.Write(body)
return []byte(computed.Sum(nil))
}
func verifySignature(secret []byte, signature string, body []byte) bool {
const signaturePrefix = "sha1="
const signatureLength = 45 // len(SignaturePrefix) + len(hex(sha1))
if len(signature) != signatureLength || !strings.HasPrefix(signature, signaturePrefix) {
return false
}
actual := make([]byte, 20)
hex.Decode(actual, []byte(signature[5:]))
return hmac.Equal(signBody(secret, body), actual)
}
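
verifySignature above expects a GitHub-style header value: "sha1=" followed by 40 hex characters of an HMAC-SHA1 over the raw request body. The standalone sketch below (not part of the commit) shows how a matching signature can be produced, for example when exercising a hook locally; the secret and body values are placeholders.

package main

import (
	"crypto/hmac"
	"crypto/sha1"
	"encoding/hex"
	"fmt"
)

// sign computes the "sha1=<hex>" value that verifySignature checks.
func sign(secret, body []byte) string {
	mac := hmac.New(sha1.New, secret)
	mac.Write(body)
	return "sha1=" + hex.EncodeToString(mac.Sum(nil))
}

func main() {
	body := []byte(`{"ref":"refs/heads/master"}`)
	// "my-webhook-secret" is a placeholder value for illustration.
	fmt.Println(sign([]byte("my-webhook-secret"), body)) // 45 chars: "sha1=" + 40 hex digits
}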

View File

@ -1,181 +0,0 @@
package mgr
type Hookjs struct {
Uis map[string]string
Desc string
Defs string
js string
}
var HookjsMap map[string]*Hookjs
func init() {
HookjsMap = make(map[string]*Hookjs)
HookjsMap["web"] = &Hookjs{
Uis: map[string]string{"password": "string"},
Desc: "password:触发密码",
Defs: `{"password":"pwd"}`,
js: `
function main(){
console.log('start run main function!!!!');
var ret={check:false};
var conf=getConf();
var body=getBody();
if(conf.password!=body.password){
ret.errs='incorrect trigger password';
return ret;
}
ret.check=true;
return ret
}
`,
}
HookjsMap["gitee"] = &Hookjs{
Uis: map[string]string{"password": "string", "branch": "string"},
Desc: "password:推送密码,branch:push对象分支",
Defs: `{"password":"pwd","branch":"master"}`,
js: `
function main(){
console.log('start run main function!!!!');
var ret={check:false};
var conf=getConf();
var body=getBody();
if(conf.password!=body.password){
ret.errs='incorrect trigger password';
return ret;
}
if(body.hook_name!='push_hooks'||!body.ref||body.ref==''){
return ret;
}
console.log(conf.branch,body.ref);
if(conf.branch&&conf.branch!=''&&body.ref!='refs/heads/'+conf.branch){
return ret;
}
ret.check=true;
return ret
}
`,
}
HookjsMap["gitea"] = &Hookjs{
Uis: map[string]string{"token": "string", "branch": "string"},
Desc: "secret:秘钥,branch:push对象分支",
Defs: `{"secret":"pwd","branch":"master"}`,
js: `
function main(){
console.log('start run main function!!!!');
var ret={check:false};
var conf=getConf();
var body=getBody();
var event=getHeader('X-Gitea-Event');
if(conf.secret!=body.secret){
ret.errs='incorrect trigger secret';
return ret;
}
if(event!='push'||!body.ref||body.ref==''){
return ret;
}
console.log(conf.branch,body.ref);
if(conf.branch&&conf.branch!=''&&body.ref!='refs/heads/'+conf.branch){
return ret;
}
ret.check=true;
return ret
}
`,
}
// support for Aliyun Codeup
HookjsMap["codeup"] = &Hookjs{
Uis: map[string]string{"token": "string", "branch": "string"},
Desc: "secret:秘钥,branch:push对象分支",
Defs: `{"token":"pwd","branch":"master"}`,
js: `
function main(){
console.log('start run main function!!!!');
var ret={check:false};
var conf=getConf();
var body=getBody();
var secret=getHeader('X-Codeup-Token');
var event=getHeader('X-Codeup-Event');
if(conf.secret!=body.secret){
ret.errs='incorrect trigger secret';
return ret;
}
if(event!='Push Hook'||!body.ref||body.ref==''){
return ret;
}
console.log(conf.branch,body.ref);
if(conf.branch&&conf.branch!=''&&body.ref!='refs/heads/'+conf.branch){
return ret;
}
ret.check=true;
return ret
}
`,
}
HookjsMap["github"] = &Hookjs{
Uis: map[string]string{"password": "string", "branch": "string"},
Desc: "password:签名秘钥,branch:push对象分支",
Defs: `{"password":"pwd","branch":"master"}`,
js: `
function main(){
console.log('start run main function!!!!');
var ret={check:false};
var conf=getConf();
var body=getBody();
var bodys=getBodys();
var nm=getHeader('X-GitHub-Event');
var tk=getHeader('X-Hub-Signature');
if(!verifySignature(conf.password,tk,bodys)){
ret.errs='incorrect trigger secret';
return ret;
}
if(nm!='push'||!body.ref||body.ref==''){
return ret;
}
console.log(conf.branch,body.ref);
if(conf.branch&&conf.branch!=''&&body.ref!='refs/heads/'+conf.branch){
return ret;
}
ret.check=true;
return ret
}
`,
}
HookjsMap["gitlab"] = &Hookjs{
Uis: map[string]string{"token": "string", "branch": "string"},
Desc: "token:秘钥,branch:push对象分支",
Defs: `{"token":"pwd","branch":"master"}`,
js: `
function main(){
console.log('start run main function!!!!');
var ret={check:false};
var conf=getConf();
var body=getBody();
var tk=getHeader('X-Gitlab-Token');
if(conf.token!=tk){
ret.errs='incorrect trigger token';
return ret;
}
if(body.object_kind!='push'||!body.ref||body.ref==''){
return ret;
}
console.log(conf.branch,body.ref);
if(conf.branch&&conf.branch!=''&&body.ref!='refs/heads/'+conf.branch){
return ret;
}
ret.check=true;
return ret
}
`,
}
}

View File

@ -1,144 +0,0 @@
package mgr
import (
"context"
"encoding/json"
"errors"
"fmt"
"gokins/comm"
"gokins/model"
"gokins/service/dbService"
"time"
ruisUtil "github.com/mgr9525/go-ruisutil"
)
type confTimeBean struct {
Repeated string `json:"repeated"`
Dates string `json:"dates"`
Date time.Time `json:"-"`
}
type trigTimeTask struct {
md *model.TModel
tg *model.TTrigger
conf *confTimeBean
ctx context.Context
cncl context.CancelFunc
runtm time.Time
}
func (c *trigTimeTask) start(pars ...interface{}) error {
if c.tg == nil || c.cncl != nil {
return errors.New("already run")
}
c.conf = &confTimeBean{}
err := json.Unmarshal([]byte(c.tg.Config), c.conf)
if err != nil {
return err
}
tms, err := time.Parse(comm.TimeFmtpck, c.conf.Dates)
if err != nil {
return err
}
c.conf.Date = tms.Local()
println(fmt.Sprintf("%d-%d-%d %d:%d:%d", c.conf.Date.Year(), c.conf.Date.Month(), c.conf.Date.Day(), c.conf.Date.Hour(), c.conf.Date.Minute(), c.conf.Date.Second()))
c.md = dbService.GetModel(c.tg.Mid)
if c.md == nil {
return errors.New("not found model")
}
c.ctx, c.cncl = context.WithCancel(mgrCtx)
go func() {
for {
select {
case <-c.ctx.Done():
goto end
default:
c.run()
time.Sleep(time.Millisecond * 200)
}
}
end:
c.cncl = nil
println("ctx end!")
}()
return nil
}
func (c *trigTimeTask) stop() {
if c.cncl != nil {
c.cncl()
c.cncl = nil
}
}
func (c *trigTimeTask) isRun() bool {
return c.cncl != nil
}
func (c *trigTimeTask) run() {
defer ruisUtil.Recovers("RunTask start", func(errs string) {
println("trigTimeTask run err:" + errs)
})
if time.Since(c.runtm).Seconds() < 5 {
return
}
isend := false
match := false
switch c.conf.Repeated {
case "1":
match = c.check(0, 0, 0, 1, 1, 1, 0)
case "2":
match = c.check(0, 0, 0, 1, 1, 1, 1)
case "3":
match = c.check(0, 0, 1, 1, 1, 1, 0)
case "4":
match = c.check(0, 1, 1, 1, 1, 1, 0)
default:
isend = true
match = c.check(1, 1, 1, 1, 1, 1, 0)
}
if match {
if isend {
defer c.end()
}
c.runtm = time.Now()
rn := &model.TModelRun{}
rn.Tid = c.md.Id
rn.Uid = c.tg.Uid
rn.Times = time.Now()
rn.Tgid = c.tg.Id
rn.Tgtyps = "定时器触发"
comm.Db.Insert(rn)
ExecMgr.Refresh()
println("trigTimeTask model run id:", rn.Id)
}
}
func (c *trigTimeTask) check(y, m, d, h, min, s, w int) bool {
now := time.Now()
if y == 1 && now.Year() != c.conf.Date.Year() {
return false
}
if m == 1 && now.Month() != c.conf.Date.Month() {
return false
}
if d == 1 && now.Day() != c.conf.Date.Day() {
return false
}
if h == 1 && now.Hour() != c.conf.Date.Hour() {
return false
}
if min == 1 && now.Minute() != c.conf.Date.Minute() {
return false
}
if s == 1 && now.Second() != c.conf.Date.Second() {
return false
}
if w == 1 && now.Weekday() != c.conf.Date.Weekday() {
return false
}
return true
}
func (c *trigTimeTask) end() {
c.tg.Enable = 2
comm.Db.Cols("enable").Where("id=?", c.tg.Id).Update(c.tg)
c.stop()
}

View File

@ -1,87 +0,0 @@
package mgr
import (
"context"
"encoding/json"
"errors"
"gokins/comm"
"gokins/model"
"gokins/service/dbService"
"time"
ruisUtil "github.com/mgr9525/go-ruisutil"
)
type confWorkedBean struct {
Forced bool `json:"forced"`
}
type trigWorkedTask struct {
md *model.TModel
tg *model.TTrigger
conf *confWorkedBean
ctx context.Context
cncl context.CancelFunc
mde *model.TModel
mr *model.TModelRun
}
func (c *trigWorkedTask) start(pars ...interface{}) error {
if c.tg == nil || c.cncl != nil {
return errors.New("already run")
}
if len(pars) < 2 {
return errors.New("param err")
}
c.mde = pars[0].(*model.TModel)
c.mr = pars[1].(*model.TModelRun)
c.conf = &confWorkedBean{}
err := json.Unmarshal([]byte(c.tg.Config), c.conf)
if err != nil {
return err
}
c.md = dbService.GetModel(c.tg.Mid)
if c.md == nil {
return errors.New("not found model")
}
c.ctx, c.cncl = context.WithCancel(mgrCtx)
go func() {
c.tg.Errs = ""
err := c.run()
if err != nil {
c.tg.Errs = err.Error()
}
comm.Db.Cols("errs").Where("id=?", c.tg.Id).Update(c.tg)
c.stop()
println("ctx end!")
}()
return nil
}
func (c *trigWorkedTask) stop() {
if c.cncl != nil {
c.cncl()
c.cncl = nil
}
}
func (c *trigWorkedTask) isRun() bool {
return c.cncl != nil
}
func (c *trigWorkedTask) run() error {
defer ruisUtil.Recovers("RunTask start", func(errs string) {
println("trigWorkedTask run err:" + errs)
})
var err error
if c.conf.Forced || c.mr.State == 4 {
rn := &model.TModelRun{}
rn.Tid = c.md.Id
rn.Uid = c.tg.Uid
rn.Times = time.Now()
rn.Tgid = c.tg.Id
rn.Tgtyps = "流水线触发"
_, err = comm.Db.Insert(rn)
ExecMgr.Refresh()
println("trigWorkedTask model run id:", rn.Id)
}
return err
}

155
migrates/init.go Normal file
View File

@ -0,0 +1,155 @@
package migrates
import (
"database/sql"
"errors"
"fmt"
"github.com/gokins-main/gokins/comm"
"github.com/golang-migrate/migrate/v4"
"github.com/golang-migrate/migrate/v4/database/mysql"
"github.com/golang-migrate/migrate/v4/database/sqlite3"
_ "github.com/golang-migrate/migrate/v4/source/file"
bindata "github.com/golang-migrate/migrate/v4/source/go_bindata"
"path/filepath"
"strings"
)
func InitMysqlMigrate(host, dbs, user, pass string) (wait bool, rtul string, errs error) {
wait = false
if host == "" || dbs == "" || user == "" {
errs = errors.New("database config not found")
return
}
wait = true
ul := fmt.Sprintf("%s:%s@tcp(%s)/%s?parseTime=true&multiStatements=true",
user,
pass,
host,
dbs)
db, err := sql.Open("mysql", ul)
if err != nil {
errs = err
return
}
err = db.Ping()
if err != nil {
db.Close()
uls := fmt.Sprintf("%s:%s@tcp(%s)/?parseTime=true&multiStatements=true",
user,
pass,
host)
db, err = sql.Open("mysql", uls)
if err != nil {
println("open dbs err:" + err.Error())
errs = err
return
}
defer db.Close()
_, err = db.Exec(fmt.Sprintf("CREATE DATABASE `%s` DEFAULT CHARACTER SET utf8mb4;", dbs))
if err != nil {
println("create dbs err:" + err.Error())
errs = err
return
}
db.Exec(fmt.Sprintf("USE `%s`;", dbs))
err = db.Ping()
}
defer db.Close()
wait = false
if err != nil {
errs = err
return
}
// Run migrations
driver, err := mysql.WithInstance(db, &mysql.Config{})
if err != nil {
println("could not start sql migration... ", err.Error())
errs = err
return
}
defer driver.Close()
var nms []string
tms := comm.AssetNames()
for _, v := range tms {
if strings.HasPrefix(v, "mysql") {
nms = append(nms, strings.Replace(v, "mysql/", "", 1))
}
}
s := bindata.Resource(nms, func(name string) ([]byte, error) {
return comm.Asset("mysql/" + name)
})
sc, err := bindata.WithInstance(s)
if err != nil {
errs = err
return
}
defer sc.Close()
mgt, err := migrate.NewWithInstance(
"bindata", sc,
"mysql", driver)
if err != nil {
errs = err
return
}
defer mgt.Close()
err = mgt.Up()
if err != nil && err != migrate.ErrNoChange {
mgt.Down()
errs = err
return
}
return false, ul, nil
}
func InitSqliteMigrate() (rtul string, errs error) {
ul := filepath.Join(comm.WorkPath, "db.dat")
db, err := sql.Open("sqlite3", ul)
if err != nil {
errs = err
return
}
defer db.Close()
// Run migrations
driver, err := sqlite3.WithInstance(db, &sqlite3.Config{})
if err != nil {
println("could not start sql migration... ", err.Error())
errs = err
return
}
defer driver.Close()
var nms []string
tms := comm.AssetNames()
for _, v := range tms {
if strings.HasPrefix(v, "sqlite") {
nms = append(nms, strings.Replace(v, "sqlite/", "", 1))
}
}
s := bindata.Resource(nms, func(name string) ([]byte, error) {
return comm.Asset("sqlite/" + name)
})
sc, err := bindata.WithInstance(s)
if err != nil {
errs = err
return
}
defer sc.Close()
mgt, err := migrate.NewWithInstance(
"bindata", sc,
"sqlite3", driver)
if err != nil {
errs = err
return
}
defer mgt.Close()
err = mgt.Up()
if err != nil && err != migrate.ErrNoChange {
mgt.Down()
errs = err
return
}
return ul, nil
}
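
No caller of these migration helpers appears in this part of the diff, so the following is only a hedged usage sketch. It assumes the module path github.com/gokins-main/gokins/migrates, that comm.WorkPath has been set during startup, and that the "sqlite3" database/sql driver is registered indirectly through the golang-migrate sqlite3 package imported above.

package main

import (
	"log"

	"github.com/gokins-main/gokins/migrates" // assumed module path
)

func main() {
	// Create (or migrate) the embedded SQLite database under the work directory.
	dsn, err := migrates.InitSqliteMigrate()
	if err != nil {
		log.Fatalf("sqlite migration failed: %v", err)
	}
	log.Printf("database ready: %s", dsn)

	// MySQL variant (host, database, user, password are placeholder values):
	// wait, dsn, err := migrates.InitMysqlMigrate("127.0.0.1:3306", "gokins", "root", "password")
}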

View File

@ -0,0 +1,24 @@
DROP TABLE IF EXISTS `t_artifact_package`;
DROP TABLE IF EXISTS `t_artifact_version`;
DROP TABLE IF EXISTS `t_artifactory`;
DROP TABLE IF EXISTS `t_build`;
DROP TABLE IF EXISTS `t_cmd_line`;
DROP TABLE IF EXISTS `t_stage`;
DROP TABLE IF EXISTS `t_step`;
DROP TABLE IF EXISTS `t_trigger`;
DROP TABLE IF EXISTS `t_trigger_run`;
DROP TABLE IF EXISTS `t_message`;
DROP TABLE IF EXISTS `t_org`;
DROP TABLE IF EXISTS `t_org_pipe`;
DROP TABLE IF EXISTS `t_pipeline`;
DROP TABLE IF EXISTS `t_pipeline_conf`;
DROP TABLE IF EXISTS `t_pipeline_var`;
DROP TABLE IF EXISTS `t_pipeline_version`;
DROP TABLE IF EXISTS `t_param`;
DROP TABLE IF EXISTS `t_user`;
DROP TABLE IF EXISTS `t_user_info`;
DROP TABLE IF EXISTS `t_user_org`;
DROP TABLE IF EXISTS `t_user_msg`;
DROP TABLE IF EXISTS `t_user_token`;
DROP TABLE IF EXISTS `t_yml_plugin`;
DROP TABLE IF EXISTS `t_yml_template`;

View File

@ -0,0 +1,419 @@
CREATE TABLE `t_artifact_package` (
`id` varchar(64) NOT NULL,
`aid` bigint(20) NOT NULL AUTO_INCREMENT,
`repo_id` varchar(64) DEFAULT NULL,
`name` varchar(100) DEFAULT NULL,
`display_name` varchar(255) DEFAULT NULL,
`desc` varchar(500) DEFAULT NULL,
`created` datetime DEFAULT NULL,
`updated` datetime DEFAULT NULL,
`deleted` int(1) DEFAULT NULL,
`deleted_time` datetime DEFAULT NULL,
PRIMARY KEY (`aid`, `id`),
KEY `pid` (`repo_id`),
KEY `rpnm` (`repo_id`, `name`)
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
CREATE TABLE `t_artifact_version` (
`id` varchar(64) NOT NULL,
`aid` bigint(20) NOT NULL AUTO_INCREMENT,
`repo_id` varchar(64) DEFAULT NULL,
`package_id` varchar(64) DEFAULT NULL,
`name` varchar(100) DEFAULT NULL,
`version` varchar(100) DEFAULT NULL,
`sha` varchar(100) DEFAULT NULL,
`desc` varchar(500) DEFAULT NULL,
`preview` int(1) DEFAULT NULL,
`created` datetime DEFAULT NULL,
`updated` datetime DEFAULT NULL,
PRIMARY KEY (`aid`, `id`),
KEY `rpnm` (`repo_id`, `name`)
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
CREATE TABLE `t_artifactory` (
`id` varchar(64) NOT NULL,
`aid` bigint(20) NOT NULL AUTO_INCREMENT,
`uid` varchar(64) DEFAULT NULL,
`org_id` varchar(64) DEFAULT NULL,
`identifier` varchar(50) DEFAULT NULL,
`name` varchar(200) DEFAULT NULL,
`disabled` int(1) DEFAULT '0' COMMENT 'archived flag (1 archived | 0 normal)',
`source` varchar(50) DEFAULT NULL,
`desc` varchar(500) DEFAULT NULL,
`logo` varchar(255) DEFAULT NULL,
`created` datetime DEFAULT NULL,
`updated` datetime DEFAULT NULL,
`deleted` int(1) DEFAULT NULL,
`deleted_time` datetime DEFAULT NULL,
PRIMARY KEY (`aid`, `id`) USING BTREE
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
CREATE TABLE `t_build` (
`id` varchar(64) NOT NULL,
`pipeline_id` varchar(64) NULL DEFAULT NULL,
`pipeline_version_id` varchar(64) NULL DEFAULT NULL,
`status` varchar(100) NULL DEFAULT NULL COMMENT 'build status',
`error` varchar(500) NULL DEFAULT NULL COMMENT 'error message',
`event` varchar(100) NULL DEFAULT NULL COMMENT 'event',
`started` datetime(0) NULL DEFAULT NULL COMMENT 'start time',
`finished` datetime(0) NULL DEFAULT NULL COMMENT 'finish time',
`created` datetime(0) NULL DEFAULT NULL COMMENT 'created time',
`updated` datetime(0) NULL DEFAULT NULL COMMENT 'updated time',
`version` varchar(255) NULL DEFAULT NULL COMMENT 'version',
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
CREATE TABLE `t_cmd_line` (
`id` varchar(64) NOT NULL,
`group_id` varchar(64) NULL DEFAULT NULL,
`build_id` varchar(64) NULL DEFAULT NULL,
`step_id` varchar(64) NULL DEFAULT NULL,
`status` varchar(50) NULL DEFAULT NULL,
`num` int(11) NULL DEFAULT NULL,
`code` int(11) NULL DEFAULT NULL,
`content` text NULL,
`created` datetime(0) NULL DEFAULT NULL,
`started` datetime(0) NULL DEFAULT NULL,
`finished` datetime(0) NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
CREATE TABLE `t_stage` (
`id` varchar(64) NOT NULL,
`pipeline_version_id` varchar(64) NULL DEFAULT NULL COMMENT 'pipeline version id',
`build_id` varchar(64) NULL DEFAULT NULL,
`status` varchar(100) NULL DEFAULT NULL COMMENT 'build status',
`error` varchar(500) NULL DEFAULT NULL COMMENT 'error message',
`name` varchar(255) NULL DEFAULT NULL COMMENT 'name',
`display_name` varchar(255) NULL DEFAULT NULL,
`started` datetime(0) NULL DEFAULT NULL COMMENT 'start time',
`finished` datetime(0) NULL DEFAULT NULL COMMENT 'finish time',
`created` datetime(0) NULL DEFAULT NULL COMMENT 'created time',
`updated` datetime(0) NULL DEFAULT NULL COMMENT 'updated time',
`sort` int(11) NULL DEFAULT NULL,
`stage` varchar(255) NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
CREATE TABLE `t_step` (
`id` varchar(64) NOT NULL,
`build_id` varchar(64) NULL DEFAULT NULL,
`stage_id` varchar(100) NULL DEFAULT NULL COMMENT 'stage id',
`display_name` varchar(255) NULL DEFAULT NULL,
`pipeline_version_id` varchar(64) NULL DEFAULT NULL COMMENT 'pipeline version id',
`step` varchar(255) NULL DEFAULT NULL,
`status` varchar(100) NULL DEFAULT NULL COMMENT 'build status',
`event` varchar(100) NULL DEFAULT NULL COMMENT 'event',
`exit_code` int(11) NULL DEFAULT NULL COMMENT 'exit code',
`error` varchar(500) NULL DEFAULT NULL COMMENT 'error message',
`name` varchar(100) NULL DEFAULT NULL COMMENT 'name',
`started` datetime(0) NULL DEFAULT NULL COMMENT 'start time',
`finished` datetime(0) NULL DEFAULT NULL COMMENT 'finish time',
`created` datetime(0) NULL DEFAULT NULL COMMENT 'created time',
`updated` datetime(0) NULL DEFAULT NULL COMMENT 'updated time',
`version` varchar(255) NULL DEFAULT NULL COMMENT 'version',
`errignore` int(11) NULL DEFAULT NULL,
`commands` text NULL,
`waits` json NULL,
`sort` int(11) NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
CREATE TABLE `t_trigger` (
`id` varchar(64) NOT NULL,
`aid` bigint(20) NOT NULL AUTO_INCREMENT,
`uid` varchar(64) DEFAULT NULL,
`pipeline_id` varchar(64) NOT NULL,
`types` varchar(50) DEFAULT NULL,
`name` varchar(100) DEFAULT NULL,
`desc` varchar(255) DEFAULT NULL,
`params` json DEFAULT NULL,
`enabled` int(1) DEFAULT NULL,
`created` datetime DEFAULT NULL,
`updated` datetime DEFAULT NULL,
PRIMARY KEY (`aid`, `id`),
KEY `uid` (`uid`)
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
CREATE TABLE `t_trigger_run` (
`id` varchar(64) NOT NULL,
`aid` bigint(20) NOT NULL AUTO_INCREMENT,
`tid` varchar(64) DEFAULT NULL COMMENT 'trigger ID',
`pipe_version_id` varchar(64) DEFAULT NULL,
`infos` json DEFAULT NULL,
`error` varchar(255) DEFAULT NULL,
`created` datetime DEFAULT NULL,
PRIMARY KEY (`aid`, `id`),
KEY `tid` (`tid`)
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
CREATE TABLE `t_message` (
`id` varchar(64) NOT NULL,
`aid` BIGINT NOT NULL AUTO_INCREMENT,
`uid` varchar(64) NULL DEFAULT NULL COMMENT 'sender (nullable)',
`title` varchar(255) NULL DEFAULT NULL,
`content` longtext NULL,
`types` varchar(50) NULL DEFAULT NULL,
`created` datetime(0) NULL DEFAULT NULL,
`infos` text NULL,
`url` varchar(500) NULL DEFAULT NULL,
PRIMARY KEY (`aid`, `id`) USING BTREE
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
CREATE TABLE `t_org` (
`id` varchar(64) NOT NULL,
`aid` BIGINT NOT NULL AUTO_INCREMENT,
`uid` varchar(64) NULL DEFAULT NULL,
`name` varchar(200) NULL DEFAULT NULL,
`desc` TEXT NULL DEFAULT NULL,
`public` INT(1) NULL DEFAULT 0 COMMENT 'public',
`created` datetime(0) NULL DEFAULT NULL COMMENT 'created time',
`updated` datetime(0) NULL DEFAULT NULL COMMENT 'updated time',
`deleted` int(1) NULL DEFAULT 0,
`deleted_time` datetime(0) NULL DEFAULT NULL,
PRIMARY KEY (`aid`, `id`) USING BTREE,
INDEX `uid`(`uid`) USING BTREE
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
CREATE TABLE `t_org_pipe` (
`aid` bigint(20) NOT NULL AUTO_INCREMENT,
`org_id` varchar(64) NULL DEFAULT NULL,
`pipe_id` varchar(64) NULL DEFAULT NULL COMMENT 'pipeline id',
`created` datetime(0) NULL DEFAULT NULL,
`public` INT(1) NULL DEFAULT 0 COMMENT 'public',
PRIMARY KEY (`aid`) USING BTREE,
INDEX `org_id`(`org_id`) USING BTREE
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
CREATE TABLE `t_pipeline` (
`id` varchar(64) NOT NULL,
`uid` varchar(64) DEFAULT NULL,
`name` varchar(255) DEFAULT NULL,
`display_name` varchar(255) DEFAULT NULL,
`pipeline_type` varchar(255) DEFAULT NULL,
`created` datetime DEFAULT NULL,
`deleted` int(1) DEFAULT '0',
`deleted_time` datetime DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
CREATE TABLE `t_pipeline_conf` (
`aid` int(20) NOT NULL AUTO_INCREMENT,
`pipeline_id` varchar(64) NOT NULL,
`url` varchar(255) DEFAULT NULL,
`access_token` varchar(255) DEFAULT NULL,
`yml_content` longtext,
`username` varchar(255) DEFAULT NULL,
PRIMARY KEY (`aid`) USING BTREE
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
CREATE TABLE `t_pipeline_version` (
`id` varchar(64) NOT NULL,
`uid` varchar(64) DEFAULT NULL,
`number` bigint(20) DEFAULT NULL COMMENT 'build count',
`events` varchar(100) DEFAULT NULL COMMENT 'event: push, pr, note',
`sha` varchar(255) DEFAULT NULL,
`pipeline_name` varchar(255) DEFAULT NULL,
`pipeline_display_name` varchar(255) DEFAULT NULL,
`pipeline_id` varchar(64) DEFAULT NULL,
`version` varchar(255) DEFAULT NULL,
`content` longtext,
`created` datetime DEFAULT NULL,
`deleted` tinyint(1) DEFAULT '0',
`pr_number` bigint(20) DEFAULT NULL,
`repo_clone_url` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
CREATE TABLE `t_param` (
`aid` bigint(20) NOT NULL AUTO_INCREMENT,
`name` varchar(100) NULL DEFAULT NULL,
`title` varchar(255) NULL DEFAULT NULL,
`data` text NULL,
`times` datetime(0) NULL DEFAULT NULL,
PRIMARY KEY (`aid`) USING BTREE
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
CREATE TABLE `t_user` (
`id` varchar(64) NOT NULL,
`aid` BIGINT NOT NULL AUTO_INCREMENT,
`name` varchar(100) NULL DEFAULT NULL,
`pass` varchar(255) NULL DEFAULT NULL,
`nick` varchar(100) NULL DEFAULT NULL,
`avatar` varchar(500) NULL DEFAULT NULL,
`created` datetime(0) NULL DEFAULT NULL,
`login_time` datetime(0) NULL DEFAULT NULL,
`active` int(1) DEFAULT '0',
PRIMARY KEY (`aid`, `id`) USING BTREE
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
-- ----------------------------
INSERT INTO
`t_user`
VALUES
(
"admin",
1,
'gokins',
'e10adc3949ba59abbe56e057f20f883e',
'Administrator',
NULL,
NOW(),
NULL,
1
);
-- ----------------------------
CREATE TABLE `t_user_info` (
`id` varchar(64) NOT NULL,
`phone` varchar(100) DEFAULT NULL,
`email` varchar(200) DEFAULT NULL,
`birthday` datetime DEFAULT NULL,
`remark` text,
`perm_user` int(1) DEFAULT NULL,
`perm_org` int(1) DEFAULT NULL,
`perm_pipe` int(1) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
CREATE TABLE `t_user_org` (
`aid` bigint(20) NOT NULL AUTO_INCREMENT,
`uid` varchar(64) NULL DEFAULT NULL,
`org_id` varchar(64) NULL DEFAULT NULL,
`created` datetime(0) NULL DEFAULT NULL,
`perm_adm` INT(1) NULL DEFAULT 0 COMMENT 'administrator',
`perm_rw` INT(1) NULL DEFAULT 0 COMMENT 'write permission',
`perm_exec` INT(1) NULL DEFAULT 0 COMMENT 'execute permission',
`perm_down` int(1) DEFAULT NULL COMMENT 'artifact download permission',
PRIMARY KEY (`aid`) USING BTREE,
INDEX `uid`(`uid`) USING BTREE,
INDEX `oid`(`org_id`) USING BTREE,
INDEX `uoid`(`uid`, `org_id`) USING BTREE
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
CREATE TABLE `t_user_msg` (
`aid` BIGINT NOT NULL AUTO_INCREMENT,
`uid` varchar(64) NULL DEFAULT NULL COMMENT 'recipient',
`msg_id` varchar(64) NULL DEFAULT NULL,
`created` datetime(0) NULL DEFAULT NULL,
`readtm` datetime(0) NULL DEFAULT NULL,
`status` int(11) NULL DEFAULT 0,
`deleted` int(1) NULL DEFAULT 0,
`deleted_time` datetime(0) NULL DEFAULT NULL,
PRIMARY KEY (`aid`) USING BTREE,
INDEX `uid`(`uid`) USING BTREE
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
CREATE TABLE `t_user_token` (
`aid` bigint(20) NOT NULL AUTO_INCREMENT,
`uid` bigint(20) NULL DEFAULT NULL,
`type` varchar(50) NULL DEFAULT NULL,
`openid` varchar(100) NULL DEFAULT NULL,
`name` varchar(255) NULL DEFAULT NULL,
`nick` varchar(255) NULL DEFAULT NULL,
`avatar` varchar(500) NULL DEFAULT NULL,
`access_token` text NULL DEFAULT NULL,
`refresh_token` text NULL DEFAULT NULL,
`expires_in` bigint(20) NULL DEFAULT 0,
`expires_time` datetime(0) NULL DEFAULT NULL,
`refresh_time` datetime(0) NULL DEFAULT NULL,
`created` datetime(0) NULL DEFAULT NULL,
`tokens` text NULL,
`uinfos` text NULL,
PRIMARY KEY (`aid`) USING BTREE,
INDEX `uid`(`uid`) USING BTREE,
INDEX `openid`(`openid`) USING BTREE
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
CREATE TABLE `t_pipeline_var` (
`aid` bigint(20) NOT NULL AUTO_INCREMENT,
`uid` varchar(64) DEFAULT NULL,
`pipeline_id` varchar(64) DEFAULT NULL,
`name` varchar(255) DEFAULT NULL,
`value` varchar(255) DEFAULT NULL,
`remarks` varchar(255) DEFAULT NULL,
`public` int(1) DEFAULT '0' COMMENT 'public',
PRIMARY KEY (`aid`) USING BTREE
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
CREATE TABLE `t_yml_plugin` (
`aid` bigint(20) NOT NULL AUTO_INCREMENT,
`name` varchar(64) DEFAULT NULL,
`yml_content` longtext,
`deleted` int(1) DEFAULT '0',
`deleted_time` datetime DEFAULT NULL,
PRIMARY KEY (`aid`) USING BTREE
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
INSERT INTO
`t_yml_plugin`
VALUES
(
1,
'sh',
' - step: shell@sh\n displayName: sh\n name: sh\n commands:\n - echo hello world',
0,
NULL
);
INSERT INTO
`t_yml_plugin`
VALUES
(
2,
'bash',
' - step: shell@bash\n displayName: bash\n name: bash\n commands:\n - echo hello world',
0,
NULL
);
INSERT INTO
`t_yml_plugin`
VALUES
(
3,
'powershell',
' - step: shell@powershell\n displayName: powershell\n name: powershell\n commands:\n - echo hello world',
1,
NULL
);
INSERT INTO
`t_yml_plugin`
VALUES
(
4,
'ssh',
' - step: shell@ssh\r\n displayName: ssh\r\n name: ssh\r\n input:\r\n host: localhost:22 #port is required\r\n user: root\r\n pass: 123456\r\n workspace: /root/test #leave empty to use the $HOME directory\r\n commands:\r\n - echo hello world',
0,
NULL
);
CREATE TABLE `t_yml_template` (
`aid` bigint(20) NOT NULL AUTO_INCREMENT,
`name` varchar(64) DEFAULT NULL,
`yml_content` longtext,
`deleted` int(1) DEFAULT '0',
`deleted_time` datetime DEFAULT NULL,
PRIMARY KEY (`aid`) USING BTREE
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4;
INSERT INTO
`t_yml_template`(
`aid`,
`name`,
`yml_content`,
`deleted`,
`deleted_time`
)
VALUES
(
1,
'Golang',
'version: 1.0\nvars:\nstages:\n - stage:\n displayName: build\n name: build\n steps:\n - step: shell@sh\n displayName: go-build-1\n name: build\n env:\n commands:\n - go build main.go\n - step: shell@sh\n displayName: go-build-2\n name: test\n env:\n commands:\n - go test -v\n',
0,
NULL
);
INSERT INTO
`t_yml_template`(
`aid`,
`name`,
`yml_content`,
`deleted`,
`deleted_time`
)
VALUES
(
2,
'Maven',
'version: 1.0\nvars:\nstages:\n - stage:\n displayName: build\n name: build\n steps:\n - step: shell@sh\n displayName: java-build-1\n name: build\n env:\n commands:\n - mvn clean\n - mvn install\n - step: shell@sh\n displayName: java-build-2\n name: test\n env:\n commands:\n - mvn test -v',
0,
NULL
);
INSERT INTO
`t_yml_template`(
`aid`,
`name`,
`yml_content`,
`deleted`,
`deleted_time`
)
VALUES
(
3,
'Npm',
'version: 1.0\nvars:\nstages:\n - stage:\n displayName: build\n name: build\n steps:\n - step: shell@sh\n displayName: npm-build-1\n name: build\n env:\n commands:\n - npm build\n - step: shell@sh\n displayName: npm-build-2\n name: publish\n env:\n commands:\n - npm publish ',
0,
NULL
);

View File

@ -0,0 +1 @@
DROP TABLE IF EXISTS `t_build`;

View File

@ -0,0 +1,16 @@
CREATE TABLE `t_build` (
`id` TEXT NOT NULL PRIMARY KEY,
`pipeline_id` TEXT DEFAULT NULL,
`pipeline_version_id` TEXT DEFAULT NULL,
`status` TEXT DEFAULT NULL,
`error` TEXT DEFAULT NULL,
`event` TEXT DEFAULT NULL,
`time_stamp` DATETIME DEFAULT NULL,
`title` TEXT NULL DEFAULT NULL,
`message` TEXT NULL DEFAULT NULL,
`started` DATETIME NULL DEFAULT NULL,
`finished` DATETIME NULL DEFAULT NULL,
`created` DATETIME NULL DEFAULT NULL,
`updated` DATETIME NULL DEFAULT NULL,
`version` TEXT NULL DEFAULT NULL
);

120
migrates/up.go Normal file
View File

@ -0,0 +1,120 @@
package migrates
import (
"database/sql"
"errors"
"github.com/gokins-main/gokins/comm"
"github.com/golang-migrate/migrate/v4"
"github.com/golang-migrate/migrate/v4/database/mysql"
"github.com/golang-migrate/migrate/v4/database/sqlite3"
_ "github.com/golang-migrate/migrate/v4/source/file"
bindata "github.com/golang-migrate/migrate/v4/source/go_bindata"
"strings"
)
func UpMysqlMigrate(ul string) error {
if ul == "" {
return errors.New("database config not found")
}
db, err := sql.Open("mysql", ul)
if err != nil {
//core.Log.Errorf("could not connect to postgresql database... %v", err)
println("open db err:" + err.Error())
return err
}
err = db.Ping()
defer db.Close()
if err != nil {
return err
}
// Run migrations
driver, err := mysql.WithInstance(db, &mysql.Config{})
if err != nil {
println("could not start sql migration... ", err.Error())
return err
}
defer driver.Close()
var nms []string
tms := comm.AssetNames()
for _, v := range tms {
if strings.HasPrefix(v, "mysql") {
nms = append(nms, strings.Replace(v, "mysql/", "", 1))
}
}
s := bindata.Resource(nms, func(name string) ([]byte, error) {
return comm.Asset("mysql/" + name)
})
sc, err := bindata.WithInstance(s)
if err != nil {
return err
}
defer sc.Close()
mgt, err := migrate.NewWithInstance(
"bindata", sc,
"mysql", driver)
if err != nil {
return err
}
defer mgt.Close()
err = mgt.Up()
if err != nil && err != migrate.ErrNoChange {
mgt.Down()
return err
}
return nil
}
func UpSqliteMigrate(ul string) error {
if ul == "" {
return errors.New("database config not found")
}
db, err := sql.Open("sqlite3", ul)
if err != nil {
//core.Log.Errorf("could not connect to postgresql database... %v", err)
println("open db err:" + err.Error())
return err
}
err = db.Ping()
defer db.Close()
if err != nil {
return err
}
// Run migrations
driver, err := sqlite3.WithInstance(db, &sqlite3.Config{})
if err != nil {
println("could not start sql migration... ", err.Error())
return err
}
defer driver.Close()
var nms []string
tms := comm.AssetNames()
for _, v := range tms {
if strings.HasPrefix(v, "sqlite") {
nms = append(nms, strings.Replace(v, "sqlite/", "", 1))
}
}
s := bindata.Resource(nms, func(name string) ([]byte, error) {
return comm.Asset("sqlite/" + name)
})
sc, err := bindata.WithInstance(s)
if err != nil {
return err
}
defer sc.Close()
mgt, err := migrate.NewWithInstance(
"bindata", sc,
"sqlite3", driver)
if err != nil {
return err
}
defer mgt.Close()
err = mgt.Up()
if err != nil && err != migrate.ErrNoChange {
mgt.Down()
return err
}
return nil
}

View File

@ -0,0 +1,6 @@
package model
type SchemaMigrations struct {
Version int64 `xorm:"not null pk BIGINT(20)" json:"version"`
Dirty int `xorm:"not null TINYINT(1)" json:"dirty"`
}

View File

@ -1,12 +0,0 @@
package model
import (
"time"
)
type SysParam struct {
Id int `xorm:"pk autoincr"`
Key string
Cont []byte
Times time.Time
}

View File

@ -1,18 +0,0 @@
package model
import (
"time"
)
type SysUser struct {
Id int `xorm:"pk autoincr"`
Xid string
Name string
Pass string
Nick string
Phone string
Times time.Time
Logintm time.Time
Fwtm time.Time
Avat string
}

View File

@ -0,0 +1,18 @@
package model
import (
"time"
)
type TArtifactPackage struct {
Id string `xorm:"not null pk VARCHAR(64)" json:"id"`
Aid int64 `xorm:"not null pk autoincr BIGINT(20)" json:"aid"`
RepoId string `xorm:"index index(rpnm) VARCHAR(64)" json:"repoId"`
Name string `xorm:"index(rpnm) VARCHAR(100)" json:"name"`
DisplayName string `xorm:"VARCHAR(255)" json:"displayName"`
Desc string `xorm:"VARCHAR(500)" json:"desc"`
Created time.Time `xorm:"DATETIME" json:"created"`
Updated time.Time `xorm:"DATETIME" json:"updated"`
Deleted int `xorm:"INT(1)" json:"deleted"`
DeletedTime time.Time `xorm:"DATETIME" json:"deletedTime"`
}

View File

@ -0,0 +1,19 @@
package model
import (
"time"
)
type TArtifactVersion struct {
Id string `xorm:"not null pk VARCHAR(64)" json:"id"`
Aid int64 `xorm:"not null pk autoincr BIGINT(20)" json:"aid"`
RepoId string `xorm:"index(rpnm) VARCHAR(64)" json:"repoId"`
PackageId string `xorm:"VARCHAR(64)" json:"packageId"`
Name string `xorm:"index(rpnm) VARCHAR(100)" json:"name"`
Version string `xorm:"VARCHAR(100)" json:"version"`
Sha string `xorm:"VARCHAR(100)" json:"sha"`
Desc string `xorm:"VARCHAR(500)" json:"desc"`
Preview int `xorm:"INT(1)" json:"preview"`
Created time.Time `xorm:"DATETIME" json:"created"`
Updated time.Time `xorm:"DATETIME" json:"updated"`
}

22
model/t_artifactory.go Normal file
View File

@ -0,0 +1,22 @@
package model
import (
"time"
)
type TArtifactory struct {
Id string `xorm:"not null pk VARCHAR(64)" json:"id"`
Aid int64 `xorm:"not null pk autoincr BIGINT(20)" json:"aid"`
Uid string `xorm:"VARCHAR(64)" json:"uid"`
OrgId string `xorm:"VARCHAR(64)" json:"orgId"`
Identifier string `xorm:"VARCHAR(50)" json:"identifier"`
Name string `xorm:"VARCHAR(200)" json:"name"`
Disabled int `xorm:"default 0 comment('archived flag (1 archived | 0 normal)') INT(1)" json:"disabled"`
Source string `xorm:"VARCHAR(50)" json:"source"`
Desc string `xorm:"VARCHAR(500)" json:"desc"`
Logo string `xorm:"VARCHAR(255)" json:"logo"`
Created time.Time `xorm:"DATETIME" json:"created"`
Updated time.Time `xorm:"DATETIME" json:"updated"`
Deleted int `xorm:"INT(1)" json:"deleted"`
DeletedTime time.Time `xorm:"DATETIME" json:"deletedTime"`
}

19
model/t_build.go Normal file
View File

@ -0,0 +1,19 @@
package model
import (
"time"
)
type TBuild struct {
Id string `xorm:"not null pk VARCHAR(64)" json:"id"`
PipelineId string `xorm:"VARCHAR(64)" json:"pipelineId"`
PipelineVersionId string `xorm:"VARCHAR(64)" json:"pipelineVersionId"`
Status string `xorm:"comment('build status') VARCHAR(100)" json:"status"`
Error string `xorm:"comment('error message') VARCHAR(500)" json:"error"`
Event string `xorm:"comment('event') VARCHAR(100)" json:"event"`
Started time.Time `xorm:"comment('start time') DATETIME" json:"started"`
Finished time.Time `xorm:"comment('finish time') DATETIME" json:"finished"`
Created time.Time `xorm:"comment('created time') DATETIME" json:"created"`
Updated time.Time `xorm:"comment('updated time') DATETIME" json:"updated"`
Version string `xorm:"comment('version') VARCHAR(255)" json:"version"`
}

19
model/t_cmd_line.go Normal file
View File

@ -0,0 +1,19 @@
package model
import (
"time"
)
type TCmdLine struct {
Id string `xorm:"not null pk VARCHAR(64)" json:"id"`
GroupId string `xorm:"VARCHAR(64)" json:"groupId"`
BuildId string `xorm:"VARCHAR(64)" json:"buildId"`
StepId string `xorm:"VARCHAR(64)" json:"stepId"`
Status string `xorm:"VARCHAR(50)" json:"status"`
Num int `xorm:"INT(11)" json:"num"`
Code int `xorm:"INT(11)" json:"code"`
Content string `xorm:"TEXT" json:"content"`
Created time.Time `xorm:"DATETIME" json:"created"`
Started time.Time `xorm:"DATETIME" json:"started"`
Finished time.Time `xorm:"DATETIME" json:"finished"`
}

17
model/t_message.go Normal file
View File

@ -0,0 +1,17 @@
package model
import (
"time"
)
type TMessage struct {
Id string `xorm:"not null pk VARCHAR(64)" json:"id"`
Aid int64 `xorm:"not null pk autoincr BIGINT(20)" json:"aid"`
Uid string `xorm:"comment('sender (nullable)') VARCHAR(64)" json:"uid"`
Title string `xorm:"VARCHAR(255)" json:"title"`
Content string `xorm:"LONGTEXT" json:"content"`
Types string `xorm:"VARCHAR(50)" json:"types"`
Created time.Time `xorm:"DATETIME" json:"created"`
Infos string `xorm:"TEXT" json:"infos"`
Url string `xorm:"VARCHAR(500)" json:"url"`
}

View File

@ -1,29 +0,0 @@
package model
import (
"time"
)
type TModel struct {
Id int `xorm:"pk autoincr"`
Uid string
Title string
Desc string
Times time.Time
Del int
Envs string
Wrkdir string
Clrdir int
}
type TModelRun struct {
Id int `xorm:"pk autoincr"`
Tid int //model id
Uid string
Times time.Time
Timesd time.Time
State int // -1 stopped, 0 waiting, 1 running, 2 failed, 4 succeeded
Errs string
Tgid int
Tgtyps string
}

18
model/t_org.go Normal file
View File

@ -0,0 +1,18 @@
package model
import (
"time"
)
type TOrg struct {
Id string `xorm:"not null pk VARCHAR(64)" json:"id"`
Aid int64 `xorm:"not null pk autoincr BIGINT(20)" json:"aid"`
Uid string `xorm:"index VARCHAR(64)" json:"uid"`
Name string `xorm:"VARCHAR(200)" json:"name"`
Desc string `xorm:"TEXT" json:"desc"`
Public int `xorm:"default 0 comment('public') INT(1)" json:"public"`
Created time.Time `xorm:"comment('created time') DATETIME" json:"created"`
Updated time.Time `xorm:"comment('updated time') DATETIME" json:"updated"`
Deleted int `xorm:"default 0 INT(1)" json:"deleted"`
DeletedTime time.Time `xorm:"DATETIME" json:"deletedTime"`
}

13
model/t_org_pipe.go Normal file
View File

@ -0,0 +1,13 @@
package model
import (
"time"
)
type TOrgPipe struct {
Aid int64 `xorm:"not null pk autoincr BIGINT(20)" json:"aid"`
OrgId string `xorm:"index VARCHAR(64)" json:"orgId"`
PipeId string `xorm:"comment('pipeline id') VARCHAR(64)" json:"pipeId"`
Created time.Time `xorm:"DATETIME" json:"created"`
Public int `xorm:"default 0 comment('public') INT(1)" json:"public"`
}

13
model/t_param.go Normal file
View File

@ -0,0 +1,13 @@
package model
import (
"time"
)
type TParam struct {
Aid int64 `xorm:"not null pk autoincr BIGINT(20)" json:"aid"`
Name string `xorm:"VARCHAR(100)" json:"name"`
Title string `xorm:"VARCHAR(255)" json:"title"`
Data string `xorm:"TEXT" json:"data"`
Times time.Time `xorm:"DATETIME" json:"times"`
}

16
model/t_pipeline.go Normal file
View File

@ -0,0 +1,16 @@
package model
import (
"time"
)
type TPipeline struct {
Id string `xorm:"not null pk VARCHAR(64)" json:"id"`
Uid string `xorm:"VARCHAR(64)" json:"uid"`
Name string `xorm:"VARCHAR(255)" json:"name"`
DisplayName string `xorm:"VARCHAR(255)" json:"displayName"`
PipelineType string `xorm:"VARCHAR(255)" json:"pipelineType"`
Created time.Time `xorm:"DATETIME" json:"created"`
Deleted int `xorm:"default 0 INT(1)" json:"deleted"`
DeletedTime time.Time `xorm:"DATETIME" json:"deletedTime"`
}

10
model/t_pipeline_conf.go Normal file
View File

@ -0,0 +1,10 @@
package model
type TPipelineConf struct {
Aid int `xorm:"not null pk autoincr INT(20)" json:"aid"`
PipelineId string `xorm:"not null VARCHAR(64)" json:"pipelineId"`
Url string `xorm:"VARCHAR(255)" json:"url"`
AccessToken string `xorm:"VARCHAR(255)" json:"accessToken"`
YmlContent string `xorm:"LONGTEXT" json:"ymlContent"`
Username string `xorm:"VARCHAR(255)" json:"username"`
}

11
model/t_pipeline_var.go Normal file
View File

@ -0,0 +1,11 @@
package model
type TPipelineVar struct {
Aid int64 `xorm:"not null pk autoincr BIGINT(20)" json:"aid"`
Uid string `xorm:"VARCHAR(64)" json:"uid"`
PipelineId string `xorm:"VARCHAR(64)" json:"pipelineId"`
Name string `xorm:"VARCHAR(255)" json:"name"`
Value string `xorm:"VARCHAR(255)" json:"value"`
Remarks string `xorm:"VARCHAR(255)" json:"remarks"`
Public int `xorm:"default 0 comment('公开') INT(1)" json:"public"`
}

View File

@ -0,0 +1,22 @@
package model
import (
"time"
)
type TPipelineVersion struct {
Id string `xorm:"not null pk VARCHAR(64)" json:"id"`
Uid string `xorm:"VARCHAR(64)" json:"uid"`
Number int64 `xorm:"comment('build count') BIGINT(20)" json:"number"`
Events string `xorm:"comment('event: push, pr, note') VARCHAR(100)" json:"events"`
Sha string `xorm:"VARCHAR(255)" json:"sha"`
PipelineName string `xorm:"VARCHAR(255)" json:"pipelineName"`
PipelineDisplayName string `xorm:"VARCHAR(255)" json:"pipelineDisplayName"`
PipelineId string `xorm:"VARCHAR(64)" json:"pipelineId"`
Version string `xorm:"VARCHAR(255)" json:"version"`
Content string `xorm:"LONGTEXT" json:"content"`
Created time.Time `xorm:"DATETIME" json:"created"`
Deleted int `xorm:"default 0 TINYINT(1)" json:"deleted"`
PrNumber int64 `xorm:"BIGINT(20)" json:"prNumber"`
RepoCloneUrl string `xorm:"VARCHAR(255)" json:"repoCloneUrl"`
}

View File

@ -1,30 +0,0 @@
package model
import (
"time"
)
type TPlugin struct {
Id int `xorm:"pk autoincr"`
Tid int //model id
Type int
Title string
Para string
Cont string
Times time.Time
Sort int
Del int
Exend int
}
type TPluginRun struct {
Id int `xorm:"pk autoincr"`
Pid int //plugin id
Mid int //model id
Tid int //modelRun id
Times time.Time
Timesd time.Time
State int // -1 stopped, 0 waiting, 1 running, 2 failed, 4 succeeded
Excode int
//Output string // log persistence removed to reduce database load
}

21
model/t_stage.go Normal file
View File

@ -0,0 +1,21 @@
package model
import (
"time"
)
type TStage struct {
Id string `xorm:"not null pk VARCHAR(64)" json:"id"`
PipelineVersionId string `xorm:"comment('pipeline version id') VARCHAR(64)" json:"pipelineVersionId"`
BuildId string `xorm:"VARCHAR(64)" json:"buildId"`
Status string `xorm:"comment('build status') VARCHAR(100)" json:"status"`
Error string `xorm:"comment('error message') VARCHAR(500)" json:"error"`
Name string `xorm:"comment('name') VARCHAR(255)" json:"name"`
DisplayName string `xorm:"VARCHAR(255)" json:"displayName"`
Started time.Time `xorm:"comment('start time') DATETIME" json:"started"`
Finished time.Time `xorm:"comment('finish time') DATETIME" json:"finished"`
Created time.Time `xorm:"comment('created time') DATETIME" json:"created"`
Updated time.Time `xorm:"comment('updated time') DATETIME" json:"updated"`
Sort int `xorm:"INT(11)" json:"sort"`
Stage string `xorm:"VARCHAR(255)" json:"stage"`
}

Some files were not shown because too many files have changed in this diff