2017-09-09 16:40:55 +08:00
|
|
|
|
package main
|
|
|
|
|
|
|
|
|
|
import (
|
2018-05-10 13:31:34 +08:00
|
|
|
|
"crypto/md5"
|
|
|
|
|
"crypto/rand"
|
2019-01-23 21:39:13 +08:00
|
|
|
|
"crypto/sha1"
|
2018-05-10 13:31:34 +08:00
|
|
|
|
"encoding/base64"
|
2019-01-21 16:58:16 +08:00
|
|
|
|
"runtime"
|
2019-02-01 11:31:19 +08:00
|
|
|
|
random "math/rand"
|
2019-01-20 23:45:16 +08:00
|
|
|
|
|
2018-12-30 17:17:40 +08:00
|
|
|
|
"errors"
|
2018-05-10 18:19:04 +08:00
|
|
|
|
"flag"
|
2017-09-09 16:40:55 +08:00
|
|
|
|
"fmt"
|
2019-01-21 16:58:16 +08:00
|
|
|
|
|
2017-09-09 16:40:55 +08:00
|
|
|
|
"io"
|
2018-12-30 17:17:40 +08:00
|
|
|
|
"io/ioutil"
|
|
|
|
|
"mime/multipart"
|
|
|
|
|
"net"
|
2018-05-10 13:31:34 +08:00
|
|
|
|
"net/http"
|
2019-01-17 01:05:27 +08:00
|
|
|
|
_ "net/http/pprof"
|
2019-01-09 12:05:20 +08:00
|
|
|
|
"net/smtp"
|
2019-01-02 20:37:50 +08:00
|
|
|
|
"net/url"
|
2019-01-22 09:32:37 +08:00
|
|
|
|
"os"
|
2019-01-09 12:05:20 +08:00
|
|
|
|
"os/signal"
|
2019-01-01 23:31:14 +08:00
|
|
|
|
"path"
|
2018-12-31 17:46:24 +08:00
|
|
|
|
"path/filepath"
|
2018-12-30 17:17:40 +08:00
|
|
|
|
"reflect"
|
2019-01-22 09:32:37 +08:00
|
|
|
|
"regexp"
|
2018-12-30 18:18:42 +08:00
|
|
|
|
"runtime/debug"
|
2019-01-02 17:46:30 +08:00
|
|
|
|
"strconv"
|
2018-05-10 13:31:34 +08:00
|
|
|
|
"strings"
|
2019-01-22 09:32:37 +08:00
|
|
|
|
"sync"
|
2018-12-30 17:17:40 +08:00
|
|
|
|
"sync/atomic"
|
2019-01-22 09:32:37 +08:00
|
|
|
|
"syscall"
|
2018-05-10 13:31:34 +08:00
|
|
|
|
"time"
|
2018-12-30 17:17:40 +08:00
|
|
|
|
"unsafe"
|
|
|
|
|
|
|
|
|
|
"github.com/astaxie/beego/httplib"
|
2019-01-22 09:32:37 +08:00
|
|
|
|
"github.com/deckarep/golang-set"
|
|
|
|
|
"github.com/json-iterator/go"
|
2018-05-10 13:31:34 +08:00
|
|
|
|
log "github.com/sjqzhang/seelog"
|
2019-01-18 10:32:25 +08:00
|
|
|
|
"github.com/syndtr/goleveldb/leveldb"
|
2018-05-10 13:31:34 +08:00
|
|
|
|
)
|
|
|
|
|
|
2018-05-10 18:19:04 +08:00
|
|
|
|
// staticHandler serves stored files over HTTP; assigned elsewhere during
// startup (assignment not visible in this chunk — confirm against main/init).
var staticHandler http.Handler
|
2018-12-30 17:17:40 +08:00
|
|
|
|
|
2019-01-20 23:45:16 +08:00
|
|
|
|
// json is a drop-in replacement for encoding/json backed by jsoniter,
// configured to be compatible with the standard library.
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
|
|
|
|
|
2019-01-19 09:52:10 +08:00
|
|
|
|
// server is the package-wide Server singleton, constructed at load time.
var server = NewServer()
|
2019-01-17 01:05:27 +08:00
|
|
|
|
|
2018-12-30 18:18:42 +08:00
|
|
|
|
// logacc is the access logger; presumably initialized from
// logAccessConfigStr (initialization not visible in this chunk).
var logacc log.LoggerInterface
|
|
|
|
|
|
2019-01-01 14:41:57 +08:00
|
|
|
|
// FOLDERS lists the directories the server relies on (data, file store, conf).
var FOLDERS = []string{DATA_DIR, STORE_DIR, CONF_DIR}
|
2018-12-30 17:17:40 +08:00
|
|
|
|
|
2019-01-21 15:07:42 +08:00
|
|
|
|
// CONST_QUEUE_SIZE is the buffer size of the peer sync channels
// (queueToPeers / queueFromPeers in Server).
var CONST_QUEUE_SIZE = 100000
|
2019-01-17 01:05:27 +08:00
|
|
|
|
|
2018-12-30 17:17:40 +08:00
|
|
|
|
var (
	// FileName holds the path of the loaded config file (set by ParseConfig).
	FileName string
	// ptr atomically stores the current *GloablConfig; written by
	// ParseConfig, read by Config.
	ptr unsafe.Pointer
)
|
|
|
|
|
|
2018-05-10 13:31:34 +08:00
|
|
|
|
const (
	// Directory layout, relative to the working directory.
	STORE_DIR = "files"

	CONF_DIR = "conf"

	DATA_DIR = "data"

	// Paths of the persistent artifacts.
	CONST_LEVELDB_FILE_NAME = DATA_DIR + "/fileserver.db"

	CONST_STAT_FILE_NAME = DATA_DIR + "/stat.json"

	CONST_CONF_FILE_NAME = CONF_DIR + "/cfg.json"

	// Keys used in statMap for the global/per-day counters.
	CONST_STAT_FILE_COUNT_KEY = "fileCount"

	CONST_STAT_FILE_TOTAL_SIZE_KEY = "totalSize"

	// Per-day md5 ledger file names.
	CONST_Md5_ERROR_FILE_NAME = "errors.md5"

	CONST_Md5_QUEUE_FILE_NAME = "queue.md5"

	CONST_FILE_Md5_FILE_NAME = "files.md5"

	CONST_REMOME_Md5_FILE_NAME = "removes.md5"

	// Format string for the "admin only" rejection message.
	CONST_MESSAGE_CLUSTER_IP = "Can only be called by the cluster ip,current ip:%s"

	// cfgJson is the default configuration template written on first run;
	// the %s verbs are filled with peer id, host and peer list.
	// The Chinese keys are inline documentation for operators and are part
	// of the runtime string — do not translate.
	cfgJson = `{
	"绑定端号": "端口",
	"addr": ":8080",
	"PeerID": "集群内唯一,请使用0-9的单字符,默认自动生成",
	"peer_id": "%s",
	"本主机地址": "本机http地址,默认自动生成,必段为内网,自动生成不为内网请自行修改,下同",
	"host": "%s",
	"集群": "集群列表,注意为了高可用,IP必须不能是同一个,同一不会自动备份,且不能为127.0.0.1,且必须为内容IP,默认自动生成",
	"peers": ["%s"],
	"组号": "用于区别不同的集群(上传或下载)与support_group_upload配合使用,带在下载路径中",
	"group": "group1",
	"重试同步失败文件的时间": "单位秒",
	"refresh_interval": 1800,
	"是否自动重命名": "默认不自动重命名,使用原文件名",
	"rename_file": false,
	"是否支持WEB上传,方便调试": "默认支持web上传",
	"enable_web_upload": true,
	"是否支持非日期路径": "默认支持非日期路径,也即支持自定义路径,需要上传文件时指定path",
	"enable_custom_path": true,
	"下载域名": "用于外网下载文件的域名,不包含http://",
	"download_domain": "",
	"场景列表": "当设定后,用户指的场景必项在列表中,默认不做限制",
	"scenes": [],
	"默认场景": "默认default",
	"default_scene": "default",
	"是否显示目录": "默认显示,方便调试用,上线时请关闭",
	"show_dir": true,
	"邮件配置": "",
	"mail": {
		"user": "abc@163.com",
		"password": "abc",
		"host": "smtp.163.com:25"
	},
	"告警接收邮件列表": "接收人数组",
	"alram_receivers": [],
	"告警接收URL": "方法post,参数:subjet,message",
	"alarm_url": "",
	"下载是否需带token": "真假",
	"download_use_token": false,
	"下载token过期时间": "单位秒",
	"download_token_expire": 600,
	"是否自动修复": "在超过1亿文件时出现性能问题,取消此选项,请手动按天同步,请查看FAQ",
	"auto_repair": true,
	"文件去重算法md5可能存在冲突,默认md5": "sha1|md5",
	"file_sum_arithmetic": "md5",
	"是否支持按组(集群)管理,主要用途是Nginx支持多集群": "默认不支持,不支持时路径为http://10.1.5.4:8080/action,支持时为http://10.1.5.4:8080/group(配置中的group参数)/action,action为动作名,如status,delete,sync等",
	"support_group_manage": false,
	"管理ip列表": "用于管理集的ip白名单,",
	"admin_ips": ["127.0.0.1"]
	}
	`

	// logConfigStr is the seelog configuration for the main server log
	// (buffered, size-based rotation into ./log/fileserver.log).
	logConfigStr = `
<seelog type="asynctimer" asyncinterval="1000" minlevel="trace" maxlevel="error">
	<outputs formatid="common">
		<buffered formatid="common" size="1048576" flushperiod="1000">
			<rollingfile type="size" filename="./log/fileserver.log" maxsize="104857600" maxrolls="10"/>
		</buffered>
	</outputs>
	<formats>
		<format id="common" format="%Date %Time [%LEV] [%File:%Line] [%Func] %Msg%n" />
	</formats>
</seelog>
`

	// logAccessConfigStr is the seelog configuration for the HTTP access
	// log (./log/access.log), same rotation policy as the main log.
	logAccessConfigStr = `
<seelog type="asynctimer" asyncinterval="1000" minlevel="trace" maxlevel="error">
	<outputs formatid="common">
		<buffered formatid="common" size="1048576" flushperiod="1000">
			<rollingfile type="size" filename="./log/access.log" maxsize="104857600" maxrolls="10"/>
		</buffered>
	</outputs>
	<formats>
		<format id="common" format="%Date %Time [%LEV] [%File:%Line] [%Func] %Msg%n" />
	</formats>
</seelog>
`
)
|
|
|
|
|
|
2018-05-10 13:31:34 +08:00
|
|
|
|
// Common is a stateless grab-bag of utility helpers (hashing, paths, URL
// encoding, IP discovery, ...) shared across the server.
type Common struct {
}
|
|
|
|
|
|
2018-05-10 18:19:04 +08:00
|
|
|
|
// Server holds all runtime state of the file server node.
type Server struct {
	// ldb is the leveldb metadata store opened on CONST_LEVELDB_FILE_NAME.
	ldb *leveldb.DB
	// util provides the shared helper functions.
	util *Common
	// statMap holds global and per-day file count/size counters (int64).
	statMap *CommonMap
	sumMap  *CommonMap //map[string]mapset.Set
	// queueToPeers / queueFromPeers buffer FileInfo records exchanged with
	// cluster peers (buffered with CONST_QUEUE_SIZE).
	queueToPeers   chan FileInfo
	queueFromPeers chan FileInfo
	// lockMap provides named locks (see CommonMap.LockKey / UnLockKey).
	lockMap *CommonMap
	// curDate caches util.GetToDay() at construction time.
	curDate string
	host    string
}
|
|
|
|
|
|
|
|
|
|
// FileInfo is the metadata record for one stored file; it is what gets
// persisted and exchanged between peers.
type FileInfo struct {
	Name   string `json:"name"`
	ReName string `json:"rename"` // renamed file name, when rename_file is on
	Path   string `json:"path"`
	Md5    string `json:"md5"` // content digest (md5 or sha1 per config)
	Size   int64  `json:"size"`
	Peers  []string `json:"peers"` // peers known to hold this file
	Scene  string `json:"scene"`
	TimeStamp int64 `json:"timeStamp"` // unix seconds
}
|
|
|
|
|
|
2019-02-12 12:55:55 +08:00
|
|
|
|
// JsonResult is the generic JSON envelope returned by API endpoints.
type JsonResult struct {
	Message string      `json:"message"`
	Status  string      `json:"status"`
	Data    interface{} `json:"data"`
}
|
|
|
|
|
|
2019-01-08 16:53:03 +08:00
|
|
|
|
// FileResult is the response body returned after a successful upload.
type FileResult struct {
	Url    string `json:"url"`
	Md5    string `json:"md5"`
	Path   string `json:"path"`
	Domain string `json:"domain"`
	Scene  string `json:"scene"`
	//Just for Compatibility
	Scenes  string `json:"scenes"`
	Retmsg  string `json:"retmsg"`
	Retcode int    `json:"retcode"`
	Src     string `json:"src"`
}
|
|
|
|
|
|
2019-01-09 12:05:20 +08:00
|
|
|
|
// Mail holds SMTP credentials for alarm e-mails (see the "mail" section of
// cfgJson).
type Mail struct {
	User     string `json:"user"`
	Password string `json:"password"`
	Host     string `json:"host"` // "host:port", e.g. "smtp.163.com:25"
}
|
|
|
|
|
|
|
|
|
|
// StatDateFileInfo is one row of the per-day statistics report.
type StatDateFileInfo struct {
	Date      string `json:"date"` // yyyymmdd
	TotalSize int64  `json:"totalSize"`
	FileCount int64  `json:"fileCount"`
}
|
|
|
|
|
|
2018-12-30 17:17:40 +08:00
|
|
|
|
// GloablConfig mirrors the JSON configuration file (see cfgJson for the
// template and per-field operator notes). Access the live instance through
// Config(). (Name typo "Gloabl" is preserved — exported identifier.)
type GloablConfig struct {
	Addr                string   `json:"addr"`
	Peers               []string `json:"peers"`
	Group               string   `json:"group"`
	RenameFile          bool     `json:"rename_file"`
	ShowDir             bool     `json:"show_dir"`
	RefreshInterval     int      `json:"refresh_interval"`
	EnableWebUpload     bool     `json:"enable_web_upload"`
	DownloadDomain      string   `json:"download_domain"`
	EnableCustomPath    bool     `json:"enable_custom_path"`
	Scenes              []string `json:"scenes"`
	AlramReceivers      []string `json:"alram_receivers"`
	DefaultScene        string   `json:"default_scene"`
	Mail                Mail     `json:"mail"`
	AlarmUrl            string   `json:"alarm_url"`
	DownloadUseToken    bool     `json:"download_use_token"`
	DownloadTokenExpire int      `json:"download_token_expire"`
	QueueSize           int      `json:"queue_size"`
	AutoRepair          bool     `json:"auto_repair"`
	Host                string   `json:"host"`
	FileSumArithmetic   string   `json:"file_sum_arithmetic"` // "md5" or "sha1"
	PeerId              string   `json:"peer_id"`
	SupportGroupManage  bool     `json:"support_group_manage"`
	AdminIps            []string `json:"admin_ips"`
}
|
|
|
|
|
|
2019-01-19 09:52:10 +08:00
|
|
|
|
func NewServer() *Server {
|
2019-01-22 19:26:05 +08:00
|
|
|
|
|
2019-01-18 10:32:25 +08:00
|
|
|
|
var (
|
2019-01-22 19:26:05 +08:00
|
|
|
|
ldb *leveldb.DB
|
2019-01-18 10:32:25 +08:00
|
|
|
|
server *Server
|
2019-01-22 19:26:05 +08:00
|
|
|
|
err error
|
2019-01-18 10:32:25 +08:00
|
|
|
|
)
|
|
|
|
|
|
2019-01-19 09:52:10 +08:00
|
|
|
|
server = &Server{
|
2019-01-21 15:07:42 +08:00
|
|
|
|
util: &Common{},
|
|
|
|
|
statMap: &CommonMap{m: make(map[string]interface{})},
|
2019-02-12 17:22:47 +08:00
|
|
|
|
lockMap: &CommonMap{m: make(map[string]interface{})},
|
2019-01-21 15:07:42 +08:00
|
|
|
|
queueToPeers: make(chan FileInfo, CONST_QUEUE_SIZE),
|
|
|
|
|
queueFromPeers: make(chan FileInfo, CONST_QUEUE_SIZE),
|
2019-01-20 23:45:16 +08:00
|
|
|
|
//fileset: &CommonMap{m: make(map[string]interface{})},
|
|
|
|
|
//errorset: &CommonMap{m: make(map[string]interface{})},
|
2019-01-28 17:01:26 +08:00
|
|
|
|
|
|
|
|
|
sumMap: &CommonMap{m: make(map[string]interface{}, 365*3)}, // make(map[string]mapset.Set, 365*3),
|
2019-01-18 10:32:25 +08:00
|
|
|
|
}
|
2019-01-19 09:52:10 +08:00
|
|
|
|
settins := httplib.BeegoHTTPSettings{
|
2019-01-18 22:49:09 +08:00
|
|
|
|
UserAgent: "go-fastdfs",
|
|
|
|
|
ConnectTimeout: 10 * time.Second,
|
|
|
|
|
ReadWriteTimeout: 10 * time.Second,
|
|
|
|
|
Gzip: true,
|
|
|
|
|
DumpBody: true,
|
|
|
|
|
}
|
|
|
|
|
httplib.SetDefaultSetting(settins)
|
2019-01-19 20:44:54 +08:00
|
|
|
|
server.statMap.Put(CONST_STAT_FILE_COUNT_KEY, int64(0))
|
2019-01-20 11:05:22 +08:00
|
|
|
|
server.statMap.Put(CONST_STAT_FILE_TOTAL_SIZE_KEY, int64(0))
|
|
|
|
|
server.statMap.Put(server.util.GetToDay()+"_"+CONST_STAT_FILE_COUNT_KEY, int64(0))
|
|
|
|
|
server.statMap.Put(server.util.GetToDay()+"_"+CONST_STAT_FILE_TOTAL_SIZE_KEY, int64(0))
|
|
|
|
|
|
2019-01-19 09:52:10 +08:00
|
|
|
|
server.curDate = server.util.GetToDay()
|
2019-01-18 19:05:18 +08:00
|
|
|
|
|
2019-01-24 11:43:42 +08:00
|
|
|
|
//o := &opt.Options{
|
|
|
|
|
// Filter: filter.NewBloomFilter(160),
|
|
|
|
|
//
|
|
|
|
|
//}
|
2019-01-22 09:32:37 +08:00
|
|
|
|
|
2019-01-24 11:43:42 +08:00
|
|
|
|
ldb, err = leveldb.OpenFile(CONST_LEVELDB_FILE_NAME, nil)
|
2019-01-22 09:32:37 +08:00
|
|
|
|
if err != nil {
|
2019-01-22 19:26:05 +08:00
|
|
|
|
fmt.Println(err)
|
2019-01-22 09:32:37 +08:00
|
|
|
|
panic(err)
|
2019-01-22 19:26:05 +08:00
|
|
|
|
log.Error(err)
|
2019-01-22 09:32:37 +08:00
|
|
|
|
}
|
|
|
|
|
server.ldb = ldb
|
|
|
|
|
|
2019-01-18 10:32:25 +08:00
|
|
|
|
return server
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-02 17:46:30 +08:00
|
|
|
|
// CommonMap is a mutex-guarded map used for counters, sets and named locks.
// It embeds sync.Mutex, so values must not be copied — always use *CommonMap.
type CommonMap struct {
	sync.Mutex
	m map[string]interface{}
}
|
|
|
|
|
|
|
|
|
|
func (s *CommonMap) GetValue(k string) (interface{}, bool) {
|
|
|
|
|
s.Lock()
|
|
|
|
|
defer s.Unlock()
|
|
|
|
|
v, ok := s.m[k]
|
|
|
|
|
return v, ok
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (s *CommonMap) Put(k string, v interface{}) {
|
|
|
|
|
s.Lock()
|
|
|
|
|
defer s.Unlock()
|
|
|
|
|
s.m[k] = v
|
|
|
|
|
}
|
|
|
|
|
|
2019-02-12 17:22:47 +08:00
|
|
|
|
func (s *CommonMap) LockKey(k string) {
|
|
|
|
|
s.Lock()
|
|
|
|
|
if v,ok:=s.m[k];ok {
|
|
|
|
|
s.m[k+"_lock_"]=true
|
|
|
|
|
s.Unlock()
|
|
|
|
|
v.(*sync.Mutex).Lock()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
} else {
|
|
|
|
|
s.m[k]=&sync.Mutex{}
|
|
|
|
|
v=s.m[k]
|
|
|
|
|
s.m[k+"_lock_"]=true
|
|
|
|
|
s.Unlock()
|
|
|
|
|
v.(*sync.Mutex).Lock()
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
func (s *CommonMap) UnLockKey(k string) {
|
|
|
|
|
s.Lock()
|
|
|
|
|
if v,ok:=s.m[k];ok {
|
|
|
|
|
v.(*sync.Mutex).Unlock()
|
|
|
|
|
s.m[k+"_lock_"]=false
|
|
|
|
|
}
|
|
|
|
|
s.Unlock()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (s *CommonMap) IsLock(k string) bool {
|
|
|
|
|
s.Lock()
|
|
|
|
|
if v,ok:=s.m[k+"_lock_"];ok {
|
|
|
|
|
s.Unlock()
|
|
|
|
|
return v.(bool)
|
|
|
|
|
}
|
|
|
|
|
s.Unlock()
|
|
|
|
|
return false
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2019-01-28 17:01:26 +08:00
|
|
|
|
func (s *CommonMap) Keys() []string {
|
|
|
|
|
|
|
|
|
|
s.Lock()
|
|
|
|
|
keys := make([]string, len(s.m))
|
|
|
|
|
defer s.Unlock()
|
|
|
|
|
for k, _ := range s.m {
|
|
|
|
|
keys = append(keys, k)
|
|
|
|
|
}
|
|
|
|
|
return keys
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-20 23:45:16 +08:00
|
|
|
|
func (s *CommonMap) Clear() {
|
|
|
|
|
s.Lock()
|
|
|
|
|
defer s.Unlock()
|
2019-01-21 15:07:42 +08:00
|
|
|
|
s.m = make(map[string]interface{})
|
2019-01-20 23:45:16 +08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (s *CommonMap) Remove(key string) {
|
|
|
|
|
s.Lock()
|
|
|
|
|
defer s.Unlock()
|
2019-01-21 15:07:42 +08:00
|
|
|
|
if _, ok := s.m[key]; ok {
|
|
|
|
|
delete(s.m, key)
|
2019-01-20 23:45:16 +08:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (s *CommonMap) AddUniq(key string) {
|
|
|
|
|
s.Lock()
|
|
|
|
|
defer s.Unlock()
|
|
|
|
|
if _, ok := s.m[key]; !ok {
|
|
|
|
|
s.m[key] = nil
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-02 17:46:30 +08:00
|
|
|
|
func (s *CommonMap) AddCount(key string, count int) {
|
|
|
|
|
s.Lock()
|
|
|
|
|
defer s.Unlock()
|
|
|
|
|
if _v, ok := s.m[key]; ok {
|
|
|
|
|
v := _v.(int)
|
|
|
|
|
v = v + count
|
|
|
|
|
s.m[key] = v
|
|
|
|
|
} else {
|
|
|
|
|
s.m[key] = 1
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (s *CommonMap) AddCountInt64(key string, count int64) {
|
|
|
|
|
s.Lock()
|
|
|
|
|
defer s.Unlock()
|
|
|
|
|
|
|
|
|
|
if _v, ok := s.m[key]; ok {
|
|
|
|
|
v := _v.(int64)
|
|
|
|
|
v = v + count
|
|
|
|
|
s.m[key] = v
|
|
|
|
|
} else {
|
|
|
|
|
|
|
|
|
|
s.m[key] = count
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (s *CommonMap) Add(key string) {
|
|
|
|
|
s.Lock()
|
|
|
|
|
defer s.Unlock()
|
|
|
|
|
if _v, ok := s.m[key]; ok {
|
|
|
|
|
v := _v.(int)
|
|
|
|
|
v = v + 1
|
|
|
|
|
s.m[key] = v
|
|
|
|
|
} else {
|
|
|
|
|
|
|
|
|
|
s.m[key] = 1
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (s *CommonMap) Zero() {
|
|
|
|
|
s.Lock()
|
|
|
|
|
defer s.Unlock()
|
2019-01-19 09:52:10 +08:00
|
|
|
|
for k := range s.m {
|
2019-01-02 17:46:30 +08:00
|
|
|
|
|
|
|
|
|
s.m[k] = 0
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-20 23:45:16 +08:00
|
|
|
|
func (s *CommonMap) Contains(i ...interface{}) bool {
|
|
|
|
|
s.Lock()
|
|
|
|
|
defer s.Unlock()
|
|
|
|
|
|
|
|
|
|
for _, val := range i {
|
|
|
|
|
if _, ok := s.m[val.(string)]; !ok {
|
|
|
|
|
return false
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return true
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-02 17:46:30 +08:00
|
|
|
|
func (s *CommonMap) Get() map[string]interface{} {
|
|
|
|
|
s.Lock()
|
|
|
|
|
defer s.Unlock()
|
|
|
|
|
m := make(map[string]interface{})
|
|
|
|
|
for k, v := range s.m {
|
|
|
|
|
m[k] = v
|
|
|
|
|
}
|
|
|
|
|
return m
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-01 14:41:57 +08:00
|
|
|
|
// Config returns the currently loaded configuration; the pointer is read
// atomically from ptr, which ParseConfig stores. Calling it before
// ParseConfig has run yields nil.
func Config() *GloablConfig {
	return (*GloablConfig)(atomic.LoadPointer(&ptr))
}
|
|
|
|
|
|
|
|
|
|
func ParseConfig(filePath string) {
|
|
|
|
|
var (
|
|
|
|
|
data []byte
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
if filePath == "" {
|
|
|
|
|
data = []byte(strings.TrimSpace(cfgJson))
|
|
|
|
|
} else {
|
|
|
|
|
file, err := os.Open(filePath)
|
|
|
|
|
if err != nil {
|
|
|
|
|
panic(fmt.Sprintln("open file path:", filePath, "error:", err))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
defer file.Close()
|
|
|
|
|
|
|
|
|
|
FileName = filePath
|
|
|
|
|
|
|
|
|
|
data, err = ioutil.ReadAll(file)
|
|
|
|
|
if err != nil {
|
|
|
|
|
panic(fmt.Sprintln("file path:", filePath, " read all error:", err))
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
var c GloablConfig
|
|
|
|
|
if err := json.Unmarshal(data, &c); err != nil {
|
|
|
|
|
panic(fmt.Sprintln("file path:", filePath, "json unmarshal error:", err))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
log.Info(c)
|
|
|
|
|
|
|
|
|
|
atomic.StorePointer(&ptr, unsafe.Pointer(&c))
|
|
|
|
|
|
|
|
|
|
log.Info("config parse success")
|
2018-05-10 18:19:04 +08:00
|
|
|
|
}
|
|
|
|
|
|
2018-05-10 13:31:34 +08:00
|
|
|
|
func (this *Common) GetUUID() string {
|
|
|
|
|
|
|
|
|
|
b := make([]byte, 48)
|
|
|
|
|
if _, err := io.ReadFull(rand.Reader, b); err != nil {
|
|
|
|
|
return ""
|
|
|
|
|
}
|
2019-01-17 11:50:24 +08:00
|
|
|
|
|
2018-05-10 13:31:34 +08:00
|
|
|
|
id := this.MD5(base64.URLEncoding.EncodeToString(b))
|
|
|
|
|
return fmt.Sprintf("%s-%s-%s-%s-%s", id[0:8], id[8:12], id[12:16], id[16:20], id[20:])
|
|
|
|
|
|
|
|
|
|
}
|
2019-01-09 12:05:20 +08:00
|
|
|
|
|
2019-02-01 11:31:19 +08:00
|
|
|
|
func (this *Common) RandInt(min, max int) int {
|
|
|
|
|
|
|
|
|
|
return func(min, max int) int {
|
|
|
|
|
|
|
|
|
|
r := random.New(random.NewSource(time.Now().UnixNano()))
|
|
|
|
|
if min >= max {
|
|
|
|
|
return max
|
|
|
|
|
}
|
|
|
|
|
return r.Intn(max-min) + min
|
|
|
|
|
}(min, max)
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-09 12:05:20 +08:00
|
|
|
|
func (this *Common) GetToDay() string {
|
|
|
|
|
|
|
|
|
|
return time.Now().Format("20060102")
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-21 15:07:42 +08:00
|
|
|
|
func (this *Common) UrlEncode(v interface{}) string {
|
|
|
|
|
|
|
|
|
|
switch v.(type) {
|
|
|
|
|
case string:
|
2019-01-21 16:58:16 +08:00
|
|
|
|
m := make(map[string]string)
|
|
|
|
|
m["name"] = v.(string)
|
|
|
|
|
return strings.Replace(this.UrlEncodeFromMap(m), "name=", "", 1)
|
2019-01-21 15:07:42 +08:00
|
|
|
|
case map[string]string:
|
|
|
|
|
return this.UrlEncodeFromMap(v.(map[string]string))
|
|
|
|
|
default:
|
2019-01-21 16:58:16 +08:00
|
|
|
|
return fmt.Sprintf("%v", v)
|
2019-01-21 15:07:42 +08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (this *Common) UrlEncodeFromMap(m map[string]string) string {
|
|
|
|
|
vv := url.Values{}
|
|
|
|
|
for k, v := range m {
|
|
|
|
|
vv.Add(k, v)
|
|
|
|
|
}
|
|
|
|
|
return vv.Encode()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (this *Common) UrlDecodeToMap(body string) (map[string]string, error) {
|
|
|
|
|
var (
|
|
|
|
|
err error
|
|
|
|
|
m map[string]string
|
|
|
|
|
v url.Values
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
m = make(map[string]string)
|
|
|
|
|
|
|
|
|
|
if v, err = url.ParseQuery(body); err != nil {
|
|
|
|
|
return m, err
|
|
|
|
|
}
|
|
|
|
|
for _k, _v := range v {
|
|
|
|
|
|
|
|
|
|
if len(_v) > 0 {
|
|
|
|
|
m[_k] = _v[0]
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
return m, nil
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-19 09:13:00 +08:00
|
|
|
|
func (this *Common) GetDayFromTimeStamp(timeStamp int64) string {
|
|
|
|
|
|
2019-01-19 09:52:10 +08:00
|
|
|
|
return time.Unix(timeStamp, 0).Format("20060102")
|
2019-01-19 09:13:00 +08:00
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-19 09:52:10 +08:00
|
|
|
|
func (this *Common) StrToMapSet(str string, sep string) mapset.Set {
|
|
|
|
|
result := mapset.NewSet()
|
|
|
|
|
for _, v := range strings.Split(str, sep) {
|
2019-01-18 19:05:18 +08:00
|
|
|
|
result.Add(v)
|
|
|
|
|
}
|
|
|
|
|
return result
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-19 09:52:10 +08:00
|
|
|
|
func (this *Common) MapSetToStr(set mapset.Set, sep string) string {
|
2019-01-18 19:05:18 +08:00
|
|
|
|
|
|
|
|
|
var (
|
|
|
|
|
ret []string
|
|
|
|
|
)
|
|
|
|
|
|
2019-01-19 09:52:10 +08:00
|
|
|
|
for v := range set.Iter() {
|
|
|
|
|
ret = append(ret, v.(string))
|
2019-01-18 19:05:18 +08:00
|
|
|
|
}
|
2019-01-19 09:52:10 +08:00
|
|
|
|
return strings.Join(ret, sep)
|
2019-01-18 19:05:18 +08:00
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2018-12-30 17:17:40 +08:00
|
|
|
|
func (this *Common) GetPulicIP() string {
|
2019-01-21 15:07:42 +08:00
|
|
|
|
|
|
|
|
|
var (
|
2019-01-21 16:58:16 +08:00
|
|
|
|
err error
|
2019-01-21 15:07:42 +08:00
|
|
|
|
conn net.Conn
|
|
|
|
|
)
|
2019-01-21 16:58:16 +08:00
|
|
|
|
if conn, err = net.Dial("udp", "8.8.8.8:80"); err != nil {
|
2019-01-21 15:07:42 +08:00
|
|
|
|
return "127.0.0.1"
|
|
|
|
|
}
|
2018-12-30 17:17:40 +08:00
|
|
|
|
defer conn.Close()
|
|
|
|
|
localAddr := conn.LocalAddr().String()
|
|
|
|
|
idx := strings.LastIndex(localAddr, ":")
|
|
|
|
|
return localAddr[0:idx]
|
|
|
|
|
}
|
2018-05-10 13:31:34 +08:00
|
|
|
|
|
|
|
|
|
func (this *Common) MD5(str string) string {
|
|
|
|
|
|
|
|
|
|
md := md5.New()
|
|
|
|
|
md.Write([]byte(str))
|
|
|
|
|
return fmt.Sprintf("%x", md.Sum(nil))
|
|
|
|
|
}
|
|
|
|
|
|
2018-12-30 17:17:40 +08:00
|
|
|
|
func (this *Common) GetFileMd5(file *os.File) string {
|
|
|
|
|
file.Seek(0, 0)
|
|
|
|
|
md5h := md5.New()
|
|
|
|
|
io.Copy(md5h, file)
|
|
|
|
|
sum := fmt.Sprintf("%x", md5h.Sum(nil))
|
|
|
|
|
return sum
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-23 21:39:13 +08:00
|
|
|
|
func (this *Common) GetFileSum(file *os.File, alg string) string {
|
|
|
|
|
alg = strings.ToLower(alg)
|
|
|
|
|
if alg == "sha1" {
|
|
|
|
|
return this.GetFileSha1Sum(file)
|
|
|
|
|
} else {
|
|
|
|
|
return this.GetFileMd5(file)
|
|
|
|
|
}
|
|
|
|
|
|
2019-02-12 11:08:19 +08:00
|
|
|
|
}
|
2019-02-12 12:55:55 +08:00
|
|
|
|
func (this *Common) GetFileSumByName(filepath string, alg string) (string, error) {
|
2019-02-12 11:08:19 +08:00
|
|
|
|
var (
|
2019-02-12 12:55:55 +08:00
|
|
|
|
err error
|
2019-02-12 11:08:19 +08:00
|
|
|
|
file *os.File
|
|
|
|
|
)
|
2019-02-12 12:55:55 +08:00
|
|
|
|
file, err = os.Open(filepath)
|
|
|
|
|
if err != nil {
|
|
|
|
|
return "", err
|
2019-02-12 11:08:19 +08:00
|
|
|
|
}
|
|
|
|
|
defer file.Close()
|
|
|
|
|
alg = strings.ToLower(alg)
|
|
|
|
|
if alg == "sha1" {
|
2019-02-12 12:55:55 +08:00
|
|
|
|
return this.GetFileSha1Sum(file), nil
|
2019-02-12 11:08:19 +08:00
|
|
|
|
} else {
|
2019-02-12 12:55:55 +08:00
|
|
|
|
return this.GetFileMd5(file), nil
|
2019-02-12 11:08:19 +08:00
|
|
|
|
}
|
|
|
|
|
|
2019-01-23 21:39:13 +08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (this *Common) GetFileSha1Sum(file *os.File) string {
|
|
|
|
|
file.Seek(0, 0)
|
|
|
|
|
md5h := sha1.New()
|
|
|
|
|
io.Copy(md5h, file)
|
|
|
|
|
sum := fmt.Sprintf("%x", md5h.Sum(nil))
|
|
|
|
|
return sum
|
|
|
|
|
}
|
|
|
|
|
|
2018-12-30 17:17:40 +08:00
|
|
|
|
func (this *Common) Contains(obj interface{}, arrayobj interface{}) bool {
|
|
|
|
|
targetValue := reflect.ValueOf(arrayobj)
|
|
|
|
|
switch reflect.TypeOf(arrayobj).Kind() {
|
|
|
|
|
case reflect.Slice, reflect.Array:
|
|
|
|
|
for i := 0; i < targetValue.Len(); i++ {
|
|
|
|
|
if targetValue.Index(i).Interface() == obj {
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
case reflect.Map:
|
|
|
|
|
if targetValue.MapIndex(reflect.ValueOf(obj)).IsValid() {
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return false
|
|
|
|
|
}
|
|
|
|
|
|
2018-05-10 18:19:04 +08:00
|
|
|
|
func (this *Common) FileExists(fileName string) bool {
|
|
|
|
|
_, err := os.Stat(fileName)
|
|
|
|
|
return err == nil
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-01 14:41:57 +08:00
|
|
|
|
func (this *Common) WriteFile(path string, data string) bool {
|
2019-01-28 19:51:52 +08:00
|
|
|
|
if err := ioutil.WriteFile(path, []byte(data), 0775); err == nil {
|
2019-01-01 14:41:57 +08:00
|
|
|
|
return true
|
|
|
|
|
} else {
|
|
|
|
|
return false
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (this *Common) WriteBinFile(path string, data []byte) bool {
|
2019-01-28 19:51:52 +08:00
|
|
|
|
if err := ioutil.WriteFile(path, data, 0775); err == nil {
|
2019-01-01 14:41:57 +08:00
|
|
|
|
return true
|
|
|
|
|
} else {
|
|
|
|
|
return false
|
|
|
|
|
}
|
|
|
|
|
}
|
2019-01-02 17:46:30 +08:00
|
|
|
|
|
|
|
|
|
func (this *Common) IsExist(filename string) bool {
|
|
|
|
|
_, err := os.Stat(filename)
|
|
|
|
|
return err == nil || os.IsExist(err)
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-16 15:30:53 +08:00
|
|
|
|
func (this *Common) Match(matcher string, content string) []string {
|
|
|
|
|
var result []string
|
|
|
|
|
if reg, err := regexp.Compile(matcher); err == nil {
|
|
|
|
|
|
|
|
|
|
result = reg.FindAllString(content, -1)
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
return result
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-02 17:46:30 +08:00
|
|
|
|
func (this *Common) ReadBinFile(path string) ([]byte, error) {
|
|
|
|
|
if this.IsExist(path) {
|
|
|
|
|
fi, err := os.Open(path)
|
|
|
|
|
if err != nil {
|
|
|
|
|
return nil, err
|
|
|
|
|
}
|
|
|
|
|
defer fi.Close()
|
|
|
|
|
return ioutil.ReadAll(fi)
|
|
|
|
|
} else {
|
|
|
|
|
return nil, errors.New("not found")
|
|
|
|
|
}
|
|
|
|
|
}
|
2018-12-31 17:46:24 +08:00
|
|
|
|
func (this *Common) RemoveEmptyDir(pathname string) {
|
2019-01-02 17:46:30 +08:00
|
|
|
|
defer func() {
|
|
|
|
|
if re := recover(); re != nil {
|
|
|
|
|
buffer := debug.Stack()
|
|
|
|
|
log.Error("postFileToPeer")
|
|
|
|
|
log.Error(re)
|
|
|
|
|
log.Error(string(buffer))
|
|
|
|
|
}
|
|
|
|
|
}()
|
2018-12-31 17:46:24 +08:00
|
|
|
|
|
|
|
|
|
handlefunc := func(file_path string, f os.FileInfo, err error) error {
|
|
|
|
|
|
|
|
|
|
if f.IsDir() {
|
|
|
|
|
|
|
|
|
|
files, _ := ioutil.ReadDir(file_path)
|
2019-01-02 17:46:30 +08:00
|
|
|
|
if len(files) == 0 && file_path != pathname {
|
2018-12-31 17:46:24 +08:00
|
|
|
|
os.Remove(file_path)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fi, _ := os.Stat(pathname)
|
|
|
|
|
if fi.IsDir() {
|
|
|
|
|
filepath.Walk(pathname, handlefunc)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-17 21:46:29 +08:00
|
|
|
|
func (this *Common) JsonEncodePretty(o interface{}) string {
|
|
|
|
|
|
|
|
|
|
resp := ""
|
|
|
|
|
switch o.(type) {
|
|
|
|
|
case map[string]interface{}:
|
|
|
|
|
if data, err := json.Marshal(o); err == nil {
|
|
|
|
|
resp = string(data)
|
|
|
|
|
}
|
|
|
|
|
case map[string]string:
|
|
|
|
|
if data, err := json.Marshal(o); err == nil {
|
|
|
|
|
resp = string(data)
|
|
|
|
|
}
|
|
|
|
|
case []interface{}:
|
|
|
|
|
if data, err := json.Marshal(o); err == nil {
|
|
|
|
|
resp = string(data)
|
|
|
|
|
}
|
|
|
|
|
case []string:
|
|
|
|
|
if data, err := json.Marshal(o); err == nil {
|
|
|
|
|
resp = string(data)
|
|
|
|
|
}
|
|
|
|
|
case string:
|
|
|
|
|
resp = o.(string)
|
|
|
|
|
|
|
|
|
|
default:
|
|
|
|
|
if data, err := json.Marshal(o); err == nil {
|
|
|
|
|
resp = string(data)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
var v interface{}
|
|
|
|
|
if ok := json.Unmarshal([]byte(resp), &v); ok == nil {
|
|
|
|
|
if buf, ok := json.MarshalIndent(v, "", " "); ok == nil {
|
|
|
|
|
resp = string(buf)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return resp
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2018-12-30 18:18:42 +08:00
|
|
|
|
func (this *Common) GetClientIp(r *http.Request) string {
|
|
|
|
|
|
|
|
|
|
client_ip := ""
|
|
|
|
|
headers := []string{"X_Forwarded_For", "X-Forwarded-For", "X-Real-Ip",
|
|
|
|
|
"X_Real_Ip", "Remote_Addr", "Remote-Addr"}
|
|
|
|
|
for _, v := range headers {
|
|
|
|
|
if _v, ok := r.Header[v]; ok {
|
|
|
|
|
if len(_v) > 0 {
|
|
|
|
|
client_ip = _v[0]
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if client_ip == "" {
|
|
|
|
|
clients := strings.Split(r.RemoteAddr, ":")
|
|
|
|
|
client_ip = clients[0]
|
|
|
|
|
}
|
|
|
|
|
return client_ip
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-16 15:30:53 +08:00
|
|
|
|
// RepairStat rebuilds the file-count and total-size statistics by walking
// DATA_DIR and re-parsing every per-day md5 log file. The global counters are
// zeroed first and re-accumulated, then persisted via SaveStat. The whole
// operation is guarded by the "RepairStat" lock so only one repair runs at a
// time.
func (this *Server) RepairStat() {
	// Recover from any panic so a corrupt log file cannot kill the process.
	defer func() {
		if re := recover(); re != nil {
			buffer := debug.Stack()
			log.Error("RepairStat")
			log.Error(re)
			log.Error(string(buffer))
		}
	}()
	if this.lockMap.IsLock("RepairStat") {
		log.Warn("Lock RepairStat")
		return
	}
	this.lockMap.LockKey("RepairStat")
	defer this.lockMap.UnLockKey("RepairStat")
	// Reset the global counters; they are re-accumulated below.
	this.statMap.Put(CONST_STAT_FILE_COUNT_KEY, int64(0))
	this.statMap.Put(CONST_STAT_FILE_TOTAL_SIZE_KEY, int64(0))
	handlefunc := func(file_path string, f os.FileInfo, err error) error {
		var (
			files     []os.FileInfo
			date      []string
			data      []byte
			content   string
			lines     []string
			count     int64
			totalSize int64
			line      string
			cols      []string
			size      int64
		)
		if f.IsDir() {
			if files, err = ioutil.ReadDir(file_path); err != nil {
				return err
			}
			for _, file := range files {
				count = 0
				size = 0
				// Only the per-day md5 log file contributes to statistics.
				if file.Name() == CONST_FILE_Md5_FILE_NAME {
					if data, err = ioutil.ReadFile(file_path + "/" + file.Name()); err != nil {
						log.Error(err)
						continue
					}
					// The directory path is expected to contain an 8-digit
					// date segment (e.g. .../20190101/...).
					date = this.util.Match("\\d{8}", file_path)
					if len(date) < 1 {
						continue
					}
					content = string(data)
					lines = strings.Split(content, "\n")
					count = int64(len(lines))
					if count > 1 {
						count = count - 1
					}
					// NOTE: the line-based count above is discarded and
					// recomputed below by counting only well-formed records.
					count = 0
					for _, line = range lines {
						// Each record is "md5|size|timestamp|path"; anything
						// with fewer than three columns is skipped.
						cols = strings.Split(line, "|")
						if len(cols) > 2 {
							count = count + 1
							if size, err = strconv.ParseInt(cols[1], 10, 64); err != nil {
								size = 0
								continue
							}
							totalSize = totalSize + size
						}
					}
					// Per-day counters are overwritten; the global counters
					// are incremented (they were zeroed before the walk).
					this.statMap.Put(date[0]+"_"+CONST_STAT_FILE_COUNT_KEY, count)
					this.statMap.Put(date[0]+"_"+CONST_STAT_FILE_TOTAL_SIZE_KEY, totalSize)
					this.statMap.AddCountInt64(CONST_STAT_FILE_COUNT_KEY, count)
					this.statMap.AddCountInt64(CONST_STAT_FILE_TOTAL_SIZE_KEY, totalSize)
				}
			}
		}
		return nil
	}
	filepath.Walk(DATA_DIR, handlefunc)
	// Persist the rebuilt statistics to disk.
	this.SaveStat()
}
|
|
|
|
|
|
2019-01-21 16:58:16 +08:00
|
|
|
|
func (this *Server) CheckFileExistByMd5(md5s string, fileInfo *FileInfo) bool {
|
2019-01-21 15:07:42 +08:00
|
|
|
|
var (
|
2019-01-21 16:58:16 +08:00
|
|
|
|
err error
|
2019-01-21 15:07:42 +08:00
|
|
|
|
info *FileInfo
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
if info, err = this.GetFileInfoFromLevelDB(md5s); err != nil {
|
|
|
|
|
return false
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if info != nil && info.Md5 != "" {
|
2019-01-21 16:58:16 +08:00
|
|
|
|
if fileInfo != nil {
|
|
|
|
|
if fileInfo.Path != info.Path {
|
2019-01-21 15:07:42 +08:00
|
|
|
|
return false
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return true
|
|
|
|
|
} else {
|
|
|
|
|
return false
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-02-12 11:08:19 +08:00
|
|
|
|
func (this *Server) RepairFileInfoFromFile() {
|
|
|
|
|
defer func() {
|
|
|
|
|
if re := recover(); re != nil {
|
|
|
|
|
buffer := debug.Stack()
|
2019-02-12 17:22:47 +08:00
|
|
|
|
log.Error("RepairFileInfoFromFile")
|
2019-02-12 11:08:19 +08:00
|
|
|
|
log.Error(re)
|
|
|
|
|
log.Error(string(buffer))
|
|
|
|
|
}
|
|
|
|
|
}()
|
|
|
|
|
|
2019-02-12 17:22:47 +08:00
|
|
|
|
if this.lockMap.IsLock("RepairFileInfoFromFile") {
|
|
|
|
|
log.Warn("Lock RepairFileInfoFromFile")
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
this.lockMap.LockKey("RepairFileInfoFromFile")
|
|
|
|
|
defer this.lockMap.UnLockKey("RepairFileInfoFromFile")
|
|
|
|
|
|
2019-02-12 11:08:19 +08:00
|
|
|
|
handlefunc := func(file_path string, f os.FileInfo, err error) error {
|
|
|
|
|
|
|
|
|
|
var (
|
2019-02-12 12:55:55 +08:00
|
|
|
|
files []os.FileInfo
|
|
|
|
|
fi os.FileInfo
|
2019-02-12 11:08:19 +08:00
|
|
|
|
fileInfo FileInfo
|
2019-02-12 12:55:55 +08:00
|
|
|
|
sum string
|
2019-02-12 11:08:19 +08:00
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
if f.IsDir() {
|
|
|
|
|
|
|
|
|
|
files, err = ioutil.ReadDir(file_path)
|
2019-02-12 12:55:55 +08:00
|
|
|
|
if err != nil {
|
2019-02-12 11:08:19 +08:00
|
|
|
|
return err
|
|
|
|
|
}
|
|
|
|
|
|
2019-02-12 12:55:55 +08:00
|
|
|
|
for _, fi = range files {
|
2019-02-12 11:08:19 +08:00
|
|
|
|
|
2019-02-12 12:55:55 +08:00
|
|
|
|
if fi.IsDir() || fi.Size() == 0 {
|
2019-02-12 11:08:19 +08:00
|
|
|
|
continue
|
|
|
|
|
}
|
|
|
|
|
|
2019-02-12 12:55:55 +08:00
|
|
|
|
sum, err = this.util.GetFileSumByName(file_path+"/"+fi.Name(), Config().FileSumArithmetic)
|
|
|
|
|
if err != nil {
|
2019-02-12 11:08:19 +08:00
|
|
|
|
log.Error(err)
|
|
|
|
|
continue
|
|
|
|
|
}
|
2019-02-12 12:55:55 +08:00
|
|
|
|
fileInfo = FileInfo{
|
|
|
|
|
Size: fi.Size(),
|
|
|
|
|
Name: fi.Name(),
|
|
|
|
|
Path: strings.Replace(file_path, "\\", "/", -1),
|
|
|
|
|
Md5: sum,
|
|
|
|
|
TimeStamp: fi.ModTime().Unix(),
|
|
|
|
|
}
|
|
|
|
|
this.SaveFileMd5Log(&fileInfo, CONST_FILE_Md5_FILE_NAME)
|
2019-02-12 11:08:19 +08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
2019-02-12 12:55:55 +08:00
|
|
|
|
pathname := STORE_DIR
|
2019-02-12 11:08:19 +08:00
|
|
|
|
fi, _ := os.Stat(pathname)
|
|
|
|
|
if fi.IsDir() {
|
|
|
|
|
filepath.Walk(pathname, handlefunc)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-02 17:46:30 +08:00
|
|
|
|
// DownloadFromPeer fetches the file described by fileInfo from the given
// peer into this node's store, verifies its size and checksum against the
// metadata, and records it in the md5 log on success. Any failure removes
// the partially downloaded file where appropriate and returns silently.
func (this *Server) DownloadFromPeer(peer string, fileInfo *FileInfo) {
	var (
		err      error
		filename string
		fpath    string
		fi       os.FileInfo
		sum      string
	)
	// Nothing to do if we already hold this exact file (md5 + path match).
	if this.CheckFileExistByMd5(fileInfo.Md5, fileInfo) {
		return
	}
	// Ensure the destination directory exists. The MkdirAll error is
	// deliberately ignored; a failure will surface when writing the file.
	if _, err = os.Stat(fileInfo.Path); err != nil {
		os.MkdirAll(fileInfo.Path, 0775)
	}
	filename = fileInfo.Name
	if fileInfo.ReName != "" {
		filename = fileInfo.ReName
	}
	// Strip the local store prefix to rebuild the peer-relative URL path.
	p := strings.Replace(fileInfo.Path, STORE_DIR+"/", "", 1)
	//filename=this.util.UrlEncode(filename)
	req := httplib.Get(peer + "/" + Config().Group + "/" + p + "/" + filename)
	fpath = fileInfo.Path + "/" + filename
	// 5s connect timeout, 300s read/write timeout for large transfers.
	req.SetTimeout(time.Second*5, time.Second*300)
	if err = req.ToFile(fpath); err != nil {
		log.Error(err)
		return
	}
	// Stat the downloaded file; if it vanished or is unreadable, clean up.
	if fi, err = os.Stat(fpath); err != nil {
		os.Remove(fpath)
		return
	}
	if sum, err = this.util.GetFileSumByName(fpath, Config().FileSumArithmetic); err != nil {
		log.Error(err)
		return
	}
	// Reject the download when either the size or the checksum disagrees
	// with the peer-provided metadata.
	if fi.Size() != fileInfo.Size || sum != fileInfo.Md5 {
		log.Error("file sum check error")
		os.Remove(fpath)
		return
	}
	if this.util.IsExist(fpath) {
		this.SaveFileMd5Log(fileInfo, CONST_FILE_Md5_FILE_NAME)
	}
}
|
|
|
|
|
|
2018-05-10 18:19:04 +08:00
|
|
|
|
func (this *Server) Download(w http.ResponseWriter, r *http.Request) {
|
2019-01-01 23:31:14 +08:00
|
|
|
|
|
|
|
|
|
var (
|
2019-01-09 17:01:01 +08:00
|
|
|
|
err error
|
|
|
|
|
pathMd5 string
|
|
|
|
|
info os.FileInfo
|
|
|
|
|
peer string
|
|
|
|
|
fileInfo *FileInfo
|
|
|
|
|
fullpath string
|
|
|
|
|
pathval url.Values
|
|
|
|
|
token string
|
|
|
|
|
timestamp string
|
|
|
|
|
maxTimestamp int64
|
|
|
|
|
minTimestamp int64
|
|
|
|
|
ts int64
|
|
|
|
|
md5sum string
|
|
|
|
|
fp *os.File
|
2019-01-19 09:52:10 +08:00
|
|
|
|
isPeer bool
|
2019-01-01 23:31:14 +08:00
|
|
|
|
)
|
2019-01-02 17:46:30 +08:00
|
|
|
|
|
2019-01-09 17:01:01 +08:00
|
|
|
|
r.ParseForm()
|
|
|
|
|
|
2019-01-19 09:52:10 +08:00
|
|
|
|
isPeer = this.IsPeer(r)
|
2019-01-18 23:31:11 +08:00
|
|
|
|
|
2019-01-19 09:52:10 +08:00
|
|
|
|
if Config().DownloadUseToken && !isPeer {
|
2019-01-09 17:01:01 +08:00
|
|
|
|
|
|
|
|
|
token = r.FormValue("token")
|
|
|
|
|
timestamp = r.FormValue("timestamp")
|
|
|
|
|
|
|
|
|
|
if token == "" || timestamp == "" {
|
|
|
|
|
w.Write([]byte("unvalid request"))
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
maxTimestamp = time.Now().Add(time.Second *
|
|
|
|
|
time.Duration(Config().DownloadTokenExpire)).Unix()
|
|
|
|
|
minTimestamp = time.Now().Add(-time.Second *
|
|
|
|
|
time.Duration(Config().DownloadTokenExpire)).Unix()
|
|
|
|
|
if ts, err = strconv.ParseInt(timestamp, 10, 64); err != nil {
|
|
|
|
|
w.Write([]byte("unvalid timestamp"))
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if ts > maxTimestamp || ts < minTimestamp {
|
|
|
|
|
w.Write([]byte("timestamp expire"))
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-02 17:46:30 +08:00
|
|
|
|
fullpath = r.RequestURI[len(Config().Group)+2 : len(r.RequestURI)]
|
2019-01-02 20:37:50 +08:00
|
|
|
|
|
2019-01-07 17:19:06 +08:00
|
|
|
|
fullpath = STORE_DIR + "/" + fullpath
|
|
|
|
|
|
2019-01-02 20:37:50 +08:00
|
|
|
|
if pathval, err = url.ParseQuery(fullpath); err != nil {
|
|
|
|
|
log.Error(err)
|
|
|
|
|
} else {
|
|
|
|
|
|
2019-01-19 09:52:10 +08:00
|
|
|
|
for k := range pathval {
|
2019-01-02 20:37:50 +08:00
|
|
|
|
if k != "" {
|
|
|
|
|
fullpath = k
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-09 17:01:01 +08:00
|
|
|
|
CheckToken := func(token string, md5sum string, timestamp string) bool {
|
|
|
|
|
if this.util.MD5(md5sum+timestamp) != token {
|
|
|
|
|
return false
|
|
|
|
|
}
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-19 09:52:10 +08:00
|
|
|
|
if Config().DownloadUseToken && !isPeer {
|
2019-01-09 22:11:40 +08:00
|
|
|
|
fullpath = strings.Split(fullpath, "?")[0]
|
2019-01-09 17:01:01 +08:00
|
|
|
|
pathMd5 = this.util.MD5(fullpath)
|
|
|
|
|
if fileInfo, err = this.GetFileInfoFromLevelDB(pathMd5); err != nil {
|
|
|
|
|
log.Error(err)
|
|
|
|
|
if this.util.FileExists(fullpath) {
|
|
|
|
|
if fp, err = os.Create(fullpath); err != nil {
|
|
|
|
|
log.Error(err)
|
|
|
|
|
}
|
|
|
|
|
if fp != nil {
|
|
|
|
|
defer fp.Close()
|
|
|
|
|
}
|
2019-01-23 21:39:13 +08:00
|
|
|
|
md5sum = this.util.GetFileSum(fp, Config().FileSumArithmetic)
|
2019-01-09 17:01:01 +08:00
|
|
|
|
if !CheckToken(token, md5sum, timestamp) {
|
|
|
|
|
w.Write([]byte("unvalid request,error token"))
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
if !CheckToken(token, fileInfo.Md5, timestamp) {
|
|
|
|
|
w.Write([]byte("unvalid request,error token"))
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-01 23:31:14 +08:00
|
|
|
|
if info, err = os.Stat(fullpath); err != nil {
|
|
|
|
|
log.Error(err)
|
2019-01-02 17:46:30 +08:00
|
|
|
|
pathMd5 = this.util.MD5(fullpath)
|
|
|
|
|
for _, peer = range Config().Peers {
|
2019-01-02 18:09:02 +08:00
|
|
|
|
|
2019-01-02 17:46:30 +08:00
|
|
|
|
if fileInfo, err = this.checkPeerFileExist(peer, pathMd5); err != nil {
|
|
|
|
|
log.Error(err)
|
|
|
|
|
continue
|
|
|
|
|
}
|
2019-01-09 17:01:01 +08:00
|
|
|
|
|
2019-01-02 17:46:30 +08:00
|
|
|
|
if fileInfo.Md5 != "" {
|
|
|
|
|
|
2019-01-19 09:52:10 +08:00
|
|
|
|
if Config().DownloadUseToken && !isPeer {
|
2019-01-09 17:01:01 +08:00
|
|
|
|
if !CheckToken(token, fileInfo.Md5, timestamp) {
|
|
|
|
|
w.Write([]byte("unvalid request,error token"))
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-02 17:46:30 +08:00
|
|
|
|
go this.DownloadFromPeer(peer, fileInfo)
|
|
|
|
|
|
|
|
|
|
http.Redirect(w, r, peer+r.RequestURI, 302)
|
2019-01-16 10:28:51 +08:00
|
|
|
|
return
|
2019-01-01 23:31:14 +08:00
|
|
|
|
}
|
2019-01-02 17:46:30 +08:00
|
|
|
|
|
2019-01-01 23:31:14 +08:00
|
|
|
|
}
|
2019-01-08 18:24:51 +08:00
|
|
|
|
w.WriteHeader(404)
|
2019-01-01 23:31:14 +08:00
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if !Config().ShowDir && info.IsDir() {
|
2019-01-03 10:08:01 +08:00
|
|
|
|
w.Write([]byte("list dir deny"))
|
2019-01-01 23:31:14 +08:00
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
2018-05-10 18:19:04 +08:00
|
|
|
|
log.Info("download:" + r.RequestURI)
|
|
|
|
|
staticHandler.ServeHTTP(w, r)
|
|
|
|
|
}
|
|
|
|
|
|
2018-12-30 17:17:40 +08:00
|
|
|
|
func (this *Server) GetServerURI(r *http.Request) string {
|
|
|
|
|
return fmt.Sprintf("http://%s/", r.Host)
|
|
|
|
|
}
|
|
|
|
|
|
2019-02-01 11:31:19 +08:00
|
|
|
|
// CheckFileAndSendToPeer re-enqueues the files recorded in the given day's
// md5 log so they get (re)synchronized to peers. With isForceUpload the
// per-file peer list is cleared first, forcing a push to every peer. The
// target queue depends on which log file is being replayed: the md5 queue
// file feeds the download queue, everything else feeds the upload queue.
func (this *Server) CheckFileAndSendToPeer(date string, filename string, isForceUpload bool) {
	var (
		md5set mapset.Set
		err    error
		md5s   []interface{}
	)
	// Recover from panics so a bad record cannot kill the caller goroutine.
	defer func() {
		if re := recover(); re != nil {
			buffer := debug.Stack()
			log.Error("CheckFileAndSendToPeer")
			log.Error(re)
			log.Error(string(buffer))
		}
	}()
	if md5set, err = this.GetMd5sByDate(date, filename); err != nil {
		log.Error(err)
		return
	}
	md5s = md5set.ToSlice()
	for _, md := range md5s {
		if md == nil {
			continue
		}
		// Only files with a metadata record and a non-empty md5 are queued.
		if fileInfo, _ := this.GetFileInfoFromLevelDB(md.(string)); fileInfo != nil && fileInfo.Md5 != "" {
			if isForceUpload {
				// Forget which peers already have the file so it is pushed
				// to all of them again.
				fileInfo.Peers = []string{}
			}
			// Already on more peers than are configured: nothing to do.
			if len(fileInfo.Peers) > len(Config().Peers) {
				continue
			}
			if filename == CONST_Md5_QUEUE_FILE_NAME {
				this.AppendToDownloadQueue(fileInfo)
			} else {
				this.AppendToQueue(fileInfo)
			}
		}
	}
}
|
|
|
|
|
|
2019-01-18 22:49:09 +08:00
|
|
|
|
// postFileToPeer pushes the metadata of a locally stored file to every
// configured peer that does not already have it. For each peer it first
// asks whether the peer already holds the md5; if so the peer is simply
// recorded in fileInfo.Peers. Otherwise the FileInfo is POSTed to the
// peer's syncfile_info endpoint; a reply starting with "http://" counts as
// success, anything else sends the record to the md5 error log for retry.
func (this *Server) postFileToPeer(fileInfo *FileInfo) {
	var (
		err      error
		peer     string
		filename string
		info     *FileInfo
		postURL  string
		result   string
		fi       os.FileInfo
		i        int
		data     []byte
		fpath    string
	)
	defer func() {
		if re := recover(); re != nil {
			buffer := debug.Stack()
			log.Error("postFileToPeer")
			log.Error(re)
			log.Error(string(buffer))
		}
	}()
	for i, peer = range Config().Peers {
		_ = i
		if fileInfo.Peers == nil {
			fileInfo.Peers = []string{}
		}
		// Skip peers already known to hold this file.
		if this.util.Contains(peer, fileInfo.Peers) {
			continue
		}
		filename = fileInfo.Name
		if fileInfo.ReName != "" {
			filename = fileInfo.ReName
		}
		fpath = fileInfo.Path + "/" + filename
		if !this.util.FileExists(fpath) {
			log.Warn(fmt.Sprintf("file '%s' not found", fpath))
			continue
		} else {
			// Backfill a missing size from the file on disk.
			if fileInfo.Size == 0 {
				if fi, err = os.Stat(fpath); err != nil {
					log.Error(err)
				} else {
					fileInfo.Size = fi.Size()
				}
			}
		}
		// If the peer already has the file, just record it locally.
		// NOTE(review): the error from checkPeerFileExist is intentionally
		// not checked here; info is always non-nil by that function's
		// contract, and an empty Md5 means "not found".
		if info, err = this.checkPeerFileExist(peer, fileInfo.Md5); info.Md5 != "" {
			fileInfo.Peers = append(fileInfo.Peers, peer)
			if _, err = this.SaveFileInfoToLevelDB(fileInfo.Md5, fileInfo); err != nil {
				log.Error(err)
			}
			continue
		}
		postURL = fmt.Sprintf("%s%s", peer, this.getRequestURI("syncfile_info"))
		b := httplib.Post(postURL)
		b.SetTimeout(time.Second*5, time.Second*5)
		if data, err = json.Marshal(fileInfo); err != nil {
			log.Error(err)
			return
		}
		b.Param("fileInfo", string(data))
		result, err = b.String()
		// Anything other than a success URL goes to the error log so the
		// sync can be retried later.
		if !strings.HasPrefix(result, "http://") || err != nil {
			this.SaveFileMd5Log(fileInfo, CONST_Md5_ERROR_FILE_NAME)
		}
		if strings.HasPrefix(result, "http://") {
			log.Info(result)
			if !this.util.Contains(peer, fileInfo.Peers) {
				fileInfo.Peers = append(fileInfo.Peers, peer)
				if _, err = this.SaveFileInfoToLevelDB(fileInfo.Md5, fileInfo); err != nil {
					log.Error(err)
				}
			}
		}
		if err != nil {
			log.Error(err)
		}
	}
}
|
|
|
|
|
|
2019-01-03 10:08:01 +08:00
|
|
|
|
func (this *Server) SaveFileMd5Log(fileInfo *FileInfo, filename string) {
|
|
|
|
|
var (
|
2019-01-28 17:01:26 +08:00
|
|
|
|
err error
|
|
|
|
|
msg string
|
|
|
|
|
tmpFile *os.File
|
|
|
|
|
logpath string
|
|
|
|
|
outname string
|
|
|
|
|
logDate string
|
|
|
|
|
ok bool
|
|
|
|
|
sumKey string
|
|
|
|
|
sumset mapset.Set
|
|
|
|
|
fullpath string
|
|
|
|
|
v interface{}
|
2019-01-03 10:08:01 +08:00
|
|
|
|
)
|
|
|
|
|
|
2019-01-19 09:52:10 +08:00
|
|
|
|
logDate = this.util.GetDayFromTimeStamp(fileInfo.TimeStamp)
|
2019-01-19 09:13:00 +08:00
|
|
|
|
|
2019-01-28 17:01:26 +08:00
|
|
|
|
sumKey = fmt.Sprintf("%s_%s", logDate, filename)
|
2019-01-23 21:39:13 +08:00
|
|
|
|
|
2019-01-28 17:01:26 +08:00
|
|
|
|
if v, ok = this.sumMap.GetValue(sumKey); !ok {
|
|
|
|
|
if sumset, err = this.GetMd5sByDate(logDate, filename); err != nil {
|
2019-01-24 11:43:42 +08:00
|
|
|
|
log.Error(err)
|
|
|
|
|
}
|
2019-02-11 16:02:34 +08:00
|
|
|
|
if sumset != nil {
|
2019-02-01 11:31:19 +08:00
|
|
|
|
this.sumMap.Put(sumKey, sumset)
|
|
|
|
|
}
|
2019-01-28 17:01:26 +08:00
|
|
|
|
} else {
|
|
|
|
|
sumset = v.(mapset.Set)
|
|
|
|
|
if sumset.Cardinality() == 0 {
|
|
|
|
|
sumset, err = this.GetMd5sByDate(logDate, filename)
|
2019-01-19 09:52:10 +08:00
|
|
|
|
}
|
2019-01-19 09:13:00 +08:00
|
|
|
|
}
|
|
|
|
|
|
2019-01-28 17:01:26 +08:00
|
|
|
|
if sumset.Contains(fileInfo.Md5) {
|
2019-01-18 10:32:25 +08:00
|
|
|
|
return
|
|
|
|
|
}
|
2019-01-28 17:01:26 +08:00
|
|
|
|
outname = fileInfo.Name
|
|
|
|
|
if fileInfo.ReName != "" {
|
|
|
|
|
outname = fileInfo.ReName
|
2019-01-21 16:58:16 +08:00
|
|
|
|
}
|
2019-01-28 17:01:26 +08:00
|
|
|
|
fullpath = fileInfo.Path + "/" + outname
|
2019-01-20 23:45:16 +08:00
|
|
|
|
|
2019-01-16 15:30:53 +08:00
|
|
|
|
logpath = DATA_DIR + "/" + time.Unix(fileInfo.TimeStamp, 0).Format("20060102")
|
2019-01-03 10:08:01 +08:00
|
|
|
|
if _, err = os.Stat(logpath); err != nil {
|
2019-01-28 19:51:52 +08:00
|
|
|
|
os.MkdirAll(logpath, 0775)
|
2019-01-03 10:08:01 +08:00
|
|
|
|
}
|
2019-01-28 17:01:26 +08:00
|
|
|
|
msg = fmt.Sprintf("%s|%d|%d|%s\n", fileInfo.Md5, fileInfo.Size, fileInfo.TimeStamp, fullpath)
|
2019-01-16 15:30:53 +08:00
|
|
|
|
if tmpFile, err = os.OpenFile(logpath+"/"+filename, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0644); err != nil {
|
2019-01-03 10:08:01 +08:00
|
|
|
|
log.Error(err)
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
defer tmpFile.Close()
|
|
|
|
|
tmpFile.WriteString(msg)
|
2019-01-19 20:44:54 +08:00
|
|
|
|
|
2019-01-19 09:52:10 +08:00
|
|
|
|
if filename == CONST_FILE_Md5_FILE_NAME {
|
2019-01-28 17:01:26 +08:00
|
|
|
|
this.SaveFileInfoToLevelDB(fileInfo.Md5, fileInfo)
|
|
|
|
|
this.SaveFileInfoToLevelDB(this.util.MD5(fullpath), fileInfo)
|
2019-01-19 20:44:54 +08:00
|
|
|
|
this.statMap.AddCountInt64(logDate+"_"+CONST_STAT_FILE_COUNT_KEY, 1)
|
|
|
|
|
this.statMap.AddCountInt64(logDate+"_"+CONST_STAT_FILE_TOTAL_SIZE_KEY, fileInfo.Size)
|
|
|
|
|
this.statMap.AddCountInt64(CONST_STAT_FILE_TOTAL_SIZE_KEY, fileInfo.Size)
|
|
|
|
|
this.statMap.AddCountInt64(CONST_STAT_FILE_COUNT_KEY, 1)
|
|
|
|
|
|
|
|
|
|
this.SaveStat()
|
2019-01-18 10:32:25 +08:00
|
|
|
|
}
|
|
|
|
|
|
2019-01-28 17:01:26 +08:00
|
|
|
|
sumset.Add(fileInfo.Md5)
|
2019-01-21 16:58:16 +08:00
|
|
|
|
|
2019-01-03 10:08:01 +08:00
|
|
|
|
}
|
|
|
|
|
|
2018-12-30 23:31:42 +08:00
|
|
|
|
func (this *Server) checkPeerFileExist(peer string, md5sum string) (*FileInfo, error) {
|
|
|
|
|
|
|
|
|
|
var (
|
2019-01-16 12:08:47 +08:00
|
|
|
|
err error
|
|
|
|
|
fileInfo FileInfo
|
2018-12-30 23:31:42 +08:00
|
|
|
|
)
|
|
|
|
|
|
2019-02-12 12:55:55 +08:00
|
|
|
|
req := httplib.Post(fmt.Sprintf("%s%s?md5=%s", peer, this.getRequestURI("check_file_exist"), md5sum))
|
2018-12-30 23:31:42 +08:00
|
|
|
|
|
2019-01-23 21:39:13 +08:00
|
|
|
|
req.SetTimeout(time.Second*5, time.Second*10)
|
2018-12-30 23:31:42 +08:00
|
|
|
|
|
2019-01-16 12:08:47 +08:00
|
|
|
|
if err = req.ToJSON(&fileInfo); err != nil {
|
|
|
|
|
return &FileInfo{}, err
|
|
|
|
|
}
|
2019-01-01 23:31:14 +08:00
|
|
|
|
|
2019-01-16 12:08:47 +08:00
|
|
|
|
if fileInfo.Md5 == "" {
|
|
|
|
|
return &fileInfo, errors.New("not found")
|
2018-12-30 23:31:42 +08:00
|
|
|
|
}
|
2019-01-16 12:08:47 +08:00
|
|
|
|
|
|
|
|
|
return &fileInfo, nil
|
2018-12-30 23:31:42 +08:00
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2018-12-30 17:17:40 +08:00
|
|
|
|
func (this *Server) CheckFileExist(w http.ResponseWriter, r *http.Request) {
|
|
|
|
|
var (
|
2018-12-30 23:31:42 +08:00
|
|
|
|
data []byte
|
2018-12-30 17:17:40 +08:00
|
|
|
|
err error
|
|
|
|
|
fileInfo *FileInfo
|
2019-01-23 21:39:13 +08:00
|
|
|
|
fpath string
|
2018-12-30 17:17:40 +08:00
|
|
|
|
)
|
|
|
|
|
r.ParseForm()
|
|
|
|
|
md5sum := ""
|
2019-01-23 21:39:13 +08:00
|
|
|
|
md5sum = r.FormValue("md5")
|
2018-12-30 17:17:40 +08:00
|
|
|
|
|
2019-01-20 11:05:22 +08:00
|
|
|
|
if fileInfo, err = this.GetFileInfoFromLevelDB(md5sum); fileInfo != nil {
|
2019-01-23 21:39:13 +08:00
|
|
|
|
fpath = fileInfo.Path + "/" + fileInfo.Name
|
|
|
|
|
if fileInfo.ReName != "" {
|
|
|
|
|
fpath = fileInfo.Path + "/" + fileInfo.ReName
|
|
|
|
|
}
|
|
|
|
|
if this.util.IsExist(fpath) {
|
|
|
|
|
if data, err = json.Marshal(fileInfo); err == nil {
|
|
|
|
|
w.Write(data)
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
} else {
|
2019-01-24 11:43:42 +08:00
|
|
|
|
this.RemoveKeyFromLevelDB(md5sum) // when file delete,delete from leveldb
|
2018-12-30 23:31:42 +08:00
|
|
|
|
}
|
2018-12-30 17:17:40 +08:00
|
|
|
|
}
|
2018-12-30 23:31:42 +08:00
|
|
|
|
data, _ = json.Marshal(FileInfo{})
|
|
|
|
|
w.Write(data)
|
2019-01-23 21:39:13 +08:00
|
|
|
|
return
|
2018-12-30 23:31:42 +08:00
|
|
|
|
|
2018-12-30 17:17:40 +08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (this *Server) Sync(w http.ResponseWriter, r *http.Request) {
|
|
|
|
|
|
2019-02-12 12:55:55 +08:00
|
|
|
|
var (
|
|
|
|
|
result JsonResult
|
|
|
|
|
)
|
2018-12-30 17:17:40 +08:00
|
|
|
|
|
2019-02-12 12:55:55 +08:00
|
|
|
|
r.ParseForm()
|
|
|
|
|
result.Status = "fail"
|
2019-01-18 22:49:09 +08:00
|
|
|
|
if !this.IsPeer(r) {
|
2019-02-12 12:55:55 +08:00
|
|
|
|
|
|
|
|
|
result.Message = "client must be in cluster"
|
|
|
|
|
w.Write([]byte(this.util.JsonEncodePretty(result)))
|
2019-01-18 22:49:09 +08:00
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
2018-12-30 17:17:40 +08:00
|
|
|
|
date := ""
|
|
|
|
|
|
2018-12-30 23:31:42 +08:00
|
|
|
|
force := ""
|
2019-02-01 11:31:19 +08:00
|
|
|
|
isForceUpload := false
|
2018-12-30 23:31:42 +08:00
|
|
|
|
|
2019-01-19 09:52:10 +08:00
|
|
|
|
force = r.FormValue("force")
|
|
|
|
|
date = r.FormValue("date")
|
2018-12-30 23:31:42 +08:00
|
|
|
|
|
2019-01-17 21:46:29 +08:00
|
|
|
|
if force == "1" {
|
2019-02-01 11:31:19 +08:00
|
|
|
|
isForceUpload = true
|
2018-12-30 23:31:42 +08:00
|
|
|
|
}
|
|
|
|
|
|
2019-01-19 09:52:10 +08:00
|
|
|
|
if date == "" {
|
2019-02-12 12:55:55 +08:00
|
|
|
|
result.Message = "require paramete date &force , ?date=20181230"
|
2019-01-17 21:46:29 +08:00
|
|
|
|
|
2019-02-12 12:55:55 +08:00
|
|
|
|
w.Write([]byte(this.util.JsonEncodePretty(result)))
|
2018-12-30 17:17:40 +08:00
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
date = strings.Replace(date, ".", "", -1)
|
|
|
|
|
|
2019-02-01 11:31:19 +08:00
|
|
|
|
if isForceUpload {
|
2018-12-30 17:17:40 +08:00
|
|
|
|
|
2019-02-01 11:31:19 +08:00
|
|
|
|
go this.CheckFileAndSendToPeer(date, CONST_FILE_Md5_FILE_NAME, isForceUpload)
|
2019-01-18 19:05:18 +08:00
|
|
|
|
|
2019-01-19 10:35:42 +08:00
|
|
|
|
} else {
|
2019-01-18 19:05:18 +08:00
|
|
|
|
|
2019-02-01 11:31:19 +08:00
|
|
|
|
go this.CheckFileAndSendToPeer(date, CONST_Md5_ERROR_FILE_NAME, isForceUpload)
|
2018-12-30 17:17:40 +08:00
|
|
|
|
|
|
|
|
|
}
|
2019-02-12 12:55:55 +08:00
|
|
|
|
result.Status = "ok"
|
|
|
|
|
result.Message = "job is running"
|
|
|
|
|
w.Write([]byte(this.util.JsonEncodePretty(result)))
|
2019-01-18 22:49:09 +08:00
|
|
|
|
|
2018-12-30 17:17:40 +08:00
|
|
|
|
}
|
|
|
|
|
|
2019-01-01 23:31:14 +08:00
|
|
|
|
func (this *Server) GetFileInfoFromLevelDB(key string) (*FileInfo, error) {
|
|
|
|
|
var (
|
|
|
|
|
err error
|
|
|
|
|
data []byte
|
|
|
|
|
|
|
|
|
|
fileInfo FileInfo
|
|
|
|
|
)
|
|
|
|
|
|
2019-01-16 10:28:51 +08:00
|
|
|
|
if data, err = this.ldb.Get([]byte(key), nil); err != nil {
|
2019-01-01 23:31:14 +08:00
|
|
|
|
return nil, err
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if err = json.Unmarshal(data, &fileInfo); err != nil {
|
|
|
|
|
return nil, err
|
|
|
|
|
}
|
2019-01-20 11:05:22 +08:00
|
|
|
|
|
2019-01-01 23:31:14 +08:00
|
|
|
|
return &fileInfo, nil
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-02 17:46:30 +08:00
|
|
|
|
func (this *Server) SaveStat() {
|
|
|
|
|
|
2019-01-16 10:28:51 +08:00
|
|
|
|
SaveStatFunc := func() {
|
2019-01-02 17:46:30 +08:00
|
|
|
|
|
2019-01-16 10:28:51 +08:00
|
|
|
|
defer func() {
|
|
|
|
|
if re := recover(); re != nil {
|
|
|
|
|
buffer := debug.Stack()
|
|
|
|
|
log.Error("SaveStatFunc")
|
|
|
|
|
log.Error(re)
|
|
|
|
|
log.Error(string(buffer))
|
|
|
|
|
}
|
|
|
|
|
}()
|
2019-01-02 17:46:30 +08:00
|
|
|
|
|
2019-01-18 10:32:25 +08:00
|
|
|
|
stat := this.statMap.Get()
|
2019-01-19 20:44:54 +08:00
|
|
|
|
if v, ok := stat[CONST_STAT_FILE_COUNT_KEY]; ok {
|
2019-01-16 10:28:51 +08:00
|
|
|
|
switch v.(type) {
|
2019-01-20 11:05:22 +08:00
|
|
|
|
case int64, int32, int, float64, float32:
|
2019-01-19 09:52:10 +08:00
|
|
|
|
if v.(int64) >= 0 {
|
2019-01-16 10:28:51 +08:00
|
|
|
|
|
|
|
|
|
if data, err := json.Marshal(stat); err != nil {
|
|
|
|
|
log.Error(err)
|
|
|
|
|
} else {
|
|
|
|
|
this.util.WriteBinFile(CONST_STAT_FILE_NAME, data)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
2019-01-02 17:46:30 +08:00
|
|
|
|
}
|
2019-01-16 10:28:51 +08:00
|
|
|
|
|
2019-01-02 17:46:30 +08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-18 22:49:09 +08:00
|
|
|
|
SaveStatFunc()
|
2019-01-19 20:44:54 +08:00
|
|
|
|
|
2019-01-02 17:46:30 +08:00
|
|
|
|
}
|
|
|
|
|
|
2019-01-23 21:39:13 +08:00
|
|
|
|
func (this *Server) RemoveKeyFromLevelDB(key string) (error) {
|
|
|
|
|
var (
|
2019-01-24 11:43:42 +08:00
|
|
|
|
err error
|
2019-01-23 21:39:13 +08:00
|
|
|
|
)
|
|
|
|
|
|
2019-01-24 11:43:42 +08:00
|
|
|
|
err = this.ldb.Delete([]byte(key), nil)
|
2019-01-23 21:39:13 +08:00
|
|
|
|
return err
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-01 23:31:14 +08:00
|
|
|
|
func (this *Server) SaveFileInfoToLevelDB(key string, fileInfo *FileInfo) (*FileInfo, error) {
|
|
|
|
|
var (
|
|
|
|
|
err error
|
|
|
|
|
data []byte
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
if data, err = json.Marshal(fileInfo); err != nil {
|
|
|
|
|
|
|
|
|
|
return fileInfo, err
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-16 10:28:51 +08:00
|
|
|
|
if err = this.ldb.Put([]byte(key), data, nil); err != nil {
|
2019-01-01 23:31:14 +08:00
|
|
|
|
return fileInfo, err
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return fileInfo, nil
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-03 10:08:01 +08:00
|
|
|
|
func (this *Server) IsPeer(r *http.Request) bool {
|
|
|
|
|
var (
|
|
|
|
|
ip string
|
|
|
|
|
peer string
|
|
|
|
|
bflag bool
|
|
|
|
|
)
|
2019-01-28 17:01:26 +08:00
|
|
|
|
|
|
|
|
|
//return true
|
2019-01-03 10:08:01 +08:00
|
|
|
|
ip = this.util.GetClientIp(r)
|
2019-01-18 19:05:18 +08:00
|
|
|
|
|
2019-01-19 09:52:10 +08:00
|
|
|
|
if ip == "127.0.0.1" || ip == this.util.GetPulicIP() {
|
2019-01-18 19:05:18 +08:00
|
|
|
|
return true
|
|
|
|
|
}
|
2019-02-11 16:02:34 +08:00
|
|
|
|
|
|
|
|
|
if this.util.Contains(ip, Config().AdminIps) {
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-03 10:08:01 +08:00
|
|
|
|
ip = "http://" + ip
|
|
|
|
|
bflag = false
|
|
|
|
|
|
|
|
|
|
for _, peer = range Config().Peers {
|
|
|
|
|
if strings.HasPrefix(peer, ip) {
|
|
|
|
|
bflag = true
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return bflag
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-18 19:05:18 +08:00
|
|
|
|
// ReceiveMd5s accepts a comma-separated list of md5 keys from a cluster
// peer, looks each one up in LevelDB, and appends the found FileInfos to
// the sync queue. The lookup/enqueue work runs asynchronously; non-peers
// are rejected.
func (this *Server) ReceiveMd5s(w http.ResponseWriter, r *http.Request) {
	var (
		err      error
		md5str   string
		fileInfo *FileInfo
		md5s     []string
	)
	if !this.IsPeer(r) {
		log.Warn(fmt.Sprintf("ReceiveMd5s %s", this.util.GetClientIp(r)))
		w.Write([]byte(this.GetClusterNotPermitMessage(r)))
		return
	}
	r.ParseForm()
	md5str = r.FormValue("md5s")
	md5s = strings.Split(md5str, ",")
	// NOTE(review): AppendFunc writes the outer fileInfo/err variables from
	// a goroutine that outlives this handler — looks racy if the handler's
	// locals are touched elsewhere; confirm whether these should be locals
	// of the closure instead.
	AppendFunc := func(md5s []string) {
		for _, m := range md5s {
			if m != "" {
				if fileInfo, err = this.GetFileInfoFromLevelDB(m); err != nil {
					log.Error(err)
					continue
				}
				this.AppendToQueue(fileInfo)
			}
		}
	}
	// Enqueue in the background; the handler returns immediately with no
	// response body on success.
	go AppendFunc(md5s)
}
|
|
|
|
|
|
2019-02-12 12:55:55 +08:00
|
|
|
|
func (this *Server) GetClusterNotPermitMessage(r *http.Request) string {
|
|
|
|
|
var (
|
|
|
|
|
message string
|
2019-02-11 16:02:34 +08:00
|
|
|
|
)
|
2019-02-12 12:55:55 +08:00
|
|
|
|
message = fmt.Sprintf(CONST_MESSAGE_CLUSTER_IP, this.util.GetClientIp(r))
|
|
|
|
|
return message
|
2019-02-11 16:02:34 +08:00
|
|
|
|
}
|
|
|
|
|
|
2019-01-18 19:05:18 +08:00
|
|
|
|
func (this *Server) GetMd5sForWeb(w http.ResponseWriter, r *http.Request) {
|
|
|
|
|
|
|
|
|
|
var (
|
2019-01-19 09:52:10 +08:00
|
|
|
|
date string
|
|
|
|
|
err error
|
2019-01-18 19:05:18 +08:00
|
|
|
|
result mapset.Set
|
2019-01-19 09:52:10 +08:00
|
|
|
|
lines []string
|
2019-01-28 17:01:26 +08:00
|
|
|
|
md5s []interface{}
|
2019-01-18 19:05:18 +08:00
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
if !this.IsPeer(r) {
|
2019-02-11 16:02:34 +08:00
|
|
|
|
w.Write([]byte(this.GetClusterNotPermitMessage(r)))
|
2019-01-18 19:05:18 +08:00
|
|
|
|
return
|
|
|
|
|
|
|
|
|
|
}
|
2019-01-19 09:52:10 +08:00
|
|
|
|
date = r.FormValue("date")
|
2019-01-18 19:05:18 +08:00
|
|
|
|
|
2019-01-19 09:52:10 +08:00
|
|
|
|
if result, err = this.GetMd5sByDate(date, CONST_FILE_Md5_FILE_NAME); err != nil {
|
|
|
|
|
log.Error(err)
|
|
|
|
|
return
|
|
|
|
|
}
|
2019-01-28 17:01:26 +08:00
|
|
|
|
|
|
|
|
|
md5s = result.ToSlice()
|
|
|
|
|
|
|
|
|
|
for _, line := range md5s {
|
|
|
|
|
if line != nil && line != "" {
|
|
|
|
|
lines = append(lines, line.(string))
|
|
|
|
|
}
|
2019-01-19 09:52:10 +08:00
|
|
|
|
}
|
|
|
|
|
w.Write([]byte( strings.Join(lines, ",") ))
|
2019-01-18 19:05:18 +08:00
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (this *Server) GetMd5File(w http.ResponseWriter, r *http.Request) {
|
|
|
|
|
|
|
|
|
|
var (
|
2019-01-19 09:52:10 +08:00
|
|
|
|
date string
|
2019-01-18 19:05:18 +08:00
|
|
|
|
fpath string
|
2019-01-19 09:52:10 +08:00
|
|
|
|
data []byte
|
|
|
|
|
err error
|
2019-01-18 19:05:18 +08:00
|
|
|
|
)
|
|
|
|
|
if !this.IsPeer(r) {
|
|
|
|
|
|
|
|
|
|
return
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-19 09:52:10 +08:00
|
|
|
|
fpath = DATA_DIR + "/" + date + "/" + CONST_FILE_Md5_FILE_NAME
|
2019-01-18 19:05:18 +08:00
|
|
|
|
|
|
|
|
|
if !this.util.FileExists(fpath) {
|
|
|
|
|
w.WriteHeader(404)
|
|
|
|
|
return
|
|
|
|
|
}
|
2019-01-19 09:52:10 +08:00
|
|
|
|
if data, err = ioutil.ReadFile(fpath); err != nil {
|
2019-01-18 19:05:18 +08:00
|
|
|
|
w.WriteHeader(500)
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
w.Write(data)
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-20 23:45:16 +08:00
|
|
|
|
func (this *Server) GetMd5sMapByDate(date string, filename string) (*CommonMap, error) {
|
|
|
|
|
|
|
|
|
|
var (
|
|
|
|
|
err error
|
|
|
|
|
result *CommonMap
|
|
|
|
|
fpath string
|
|
|
|
|
content string
|
|
|
|
|
lines []string
|
|
|
|
|
line string
|
|
|
|
|
cols []string
|
|
|
|
|
data []byte
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
result = &CommonMap{m: make(map[string]interface{})}
|
|
|
|
|
if filename == "" {
|
|
|
|
|
fpath = DATA_DIR + "/" + date + "/" + CONST_FILE_Md5_FILE_NAME
|
|
|
|
|
} else {
|
|
|
|
|
fpath = DATA_DIR + "/" + date + "/" + filename
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if !this.util.FileExists(fpath) {
|
|
|
|
|
return result, errors.New(fmt.Sprintf("fpath %s not found", fpath))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if data, err = ioutil.ReadFile(fpath); err != nil {
|
|
|
|
|
return result, err
|
|
|
|
|
}
|
|
|
|
|
content = string(data)
|
|
|
|
|
lines = strings.Split(content, "\n")
|
|
|
|
|
for _, line = range lines {
|
|
|
|
|
|
|
|
|
|
cols = strings.Split(line, "|")
|
|
|
|
|
if len(cols) > 2 {
|
|
|
|
|
if _, err = strconv.ParseInt(cols[1], 10, 64); err != nil {
|
|
|
|
|
continue
|
|
|
|
|
}
|
|
|
|
|
result.Add(cols[0])
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return result, nil
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-19 09:52:10 +08:00
|
|
|
|
func (this *Server) GetMd5sByDate(date string, filename string) (mapset.Set, error) {
|
2019-01-18 19:05:18 +08:00
|
|
|
|
|
|
|
|
|
var (
|
2019-01-19 09:52:10 +08:00
|
|
|
|
err error
|
|
|
|
|
result mapset.Set
|
|
|
|
|
fpath string
|
2019-01-18 19:05:18 +08:00
|
|
|
|
content string
|
2019-01-19 09:52:10 +08:00
|
|
|
|
lines []string
|
|
|
|
|
line string
|
|
|
|
|
cols []string
|
|
|
|
|
data []byte
|
2019-01-28 17:01:26 +08:00
|
|
|
|
sumkey string
|
|
|
|
|
ok bool
|
|
|
|
|
mds []interface{}
|
|
|
|
|
v interface{}
|
2019-01-18 19:05:18 +08:00
|
|
|
|
)
|
|
|
|
|
|
2019-01-28 17:01:26 +08:00
|
|
|
|
sumkey = fmt.Sprintf("%s_%s", date, filename)
|
|
|
|
|
|
|
|
|
|
if v, ok = this.sumMap.GetValue(sumkey); ok {
|
|
|
|
|
result = v.(mapset.Set)
|
|
|
|
|
|
|
|
|
|
if result.Cardinality() > 0 {
|
|
|
|
|
return result, nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-19 09:52:10 +08:00
|
|
|
|
result = mapset.NewSet()
|
|
|
|
|
if filename == "" {
|
|
|
|
|
fpath = DATA_DIR + "/" + date + "/" + CONST_FILE_Md5_FILE_NAME
|
|
|
|
|
} else {
|
|
|
|
|
fpath = DATA_DIR + "/" + date + "/" + filename
|
|
|
|
|
}
|
2019-01-18 19:05:18 +08:00
|
|
|
|
|
|
|
|
|
if !this.util.FileExists(fpath) {
|
2019-01-19 09:52:10 +08:00
|
|
|
|
return result, errors.New(fmt.Sprintf("fpath %s not found", fpath))
|
2019-01-18 19:05:18 +08:00
|
|
|
|
}
|
|
|
|
|
|
2019-01-19 09:52:10 +08:00
|
|
|
|
if data, err = ioutil.ReadFile(fpath); err != nil {
|
|
|
|
|
return result, err
|
2019-01-18 19:05:18 +08:00
|
|
|
|
}
|
2019-01-19 09:52:10 +08:00
|
|
|
|
content = string(data)
|
|
|
|
|
lines = strings.Split(content, "\n")
|
2019-01-28 17:01:26 +08:00
|
|
|
|
if len(lines) > 0 {
|
|
|
|
|
mds = make([]interface{}, len(lines)-1)
|
|
|
|
|
} else {
|
|
|
|
|
return result, nil
|
|
|
|
|
}
|
2019-01-18 19:05:18 +08:00
|
|
|
|
for _, line = range lines {
|
|
|
|
|
|
|
|
|
|
cols = strings.Split(line, "|")
|
|
|
|
|
if len(cols) > 2 {
|
|
|
|
|
if _, err = strconv.ParseInt(cols[1], 10, 64); err != nil {
|
|
|
|
|
continue
|
|
|
|
|
}
|
2019-01-28 17:01:26 +08:00
|
|
|
|
mds = append(mds, cols[0])
|
2019-01-18 19:05:18 +08:00
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
}
|
2019-01-28 17:01:26 +08:00
|
|
|
|
result = mapset.NewSetFromSlice(mds)
|
|
|
|
|
this.sumMap.Put(sumkey, result)
|
2019-01-19 09:52:10 +08:00
|
|
|
|
return result, nil
|
2019-01-18 19:05:18 +08:00
|
|
|
|
}
|
|
|
|
|
|
2019-01-21 15:07:42 +08:00
|
|
|
|
// SyncFileInfo receives a JSON-encoded FileInfo from a peer (form value
// "fileInfo"), records it in the md5 queue log, schedules an asynchronous
// download of the file, and responds with the file's download URL.
func (this *Server) SyncFileInfo(w http.ResponseWriter, r *http.Request) {
	var (
		err         error
		fileInfo    FileInfo
		fileInfoStr string
		filename    string
	)
	r.ParseForm()
	// Only cluster members may push file metadata.
	if !this.IsPeer(r) {
		return
	}
	fileInfoStr = r.FormValue("fileInfo")
	if err = json.Unmarshal([]byte(fileInfoStr), &fileInfo); err != nil {
		// NOTE(review): on a JSON decode failure this replies with the
		// cluster-permission message, which is misleading — confirm intent.
		w.Write([]byte(this.GetClusterNotPermitMessage(r)))
		log.Error(err)
		return
	}
	// Persist to the queue log, then fetch the file bytes asynchronously.
	this.SaveFileMd5Log(&fileInfo, CONST_Md5_QUEUE_FILE_NAME)
	go this.AppendToDownloadQueue(&fileInfo)
	// ReName (the server-side renamed file) wins over the original name.
	filename = fileInfo.Name
	if fileInfo.ReName != "" {
		filename = fileInfo.ReName
	}
	// Build the public path by stripping the local store prefix.
	p := strings.Replace(fileInfo.Path, STORE_DIR+"/", "", 1)
	downloadUrl := fmt.Sprintf("http://%s/%s", r.Host, Config().Group+"/"+p+"/"+filename)
	w.Write([]byte(downloadUrl))
}
|
|
|
|
|
|
2019-01-02 17:46:30 +08:00
|
|
|
|
// SyncFile receives a file pushed by a peer (multipart POST): it writes the
// upload to the path given in the Sync-Path header, verifies the checksum
// against the peer-supplied md5, records the FileInfo in leveldb under both
// the path hash and the md5, logs it, and replies with the download URL.
func (this *Server) SyncFile(w http.ResponseWriter, r *http.Request) {
	var (
		err     error
		outPath string
		//outname string
		// timestamp string
		fileInfo   FileInfo
		tmpFile    *os.File
		uploadFile multipart.File
	)

	// Only cluster members may push files.
	if !this.IsPeer(r) {
		log.Error(fmt.Sprintf(" not is peer,ip:%s", this.util.GetClientIp(r)))
		w.Write([]byte(this.GetClusterNotPermitMessage(r)))
		return
	}

	if r.Method == "POST" {

		// Metadata comes from the header (path) and form fields.
		fileInfo.Path = r.Header.Get("Sync-Path")
		fileInfo.Md5 = r.PostFormValue("md5")
		fileInfo.Name = r.PostFormValue("name")
		fileInfo.Scene = r.PostFormValue("scene")
		// NOTE(review): the size-parse error is overwritten by the
		// timestamp parse below; only the timestamp error is handled.
		fileInfo.Size, err = strconv.ParseInt(r.PostFormValue("size"), 10, 64)
		fileInfo.TimeStamp, err = strconv.ParseInt(r.PostFormValue("timestamp"), 10, 64)

		if err != nil {
			// Fall back to "now" when the peer sent a bad timestamp.
			fileInfo.TimeStamp = time.Now().Unix()
			log.Error(err)
		}
		if uploadFile, _, err = r.FormFile("file"); err != nil {
			w.Write([]byte(err.Error()))
			log.Error(err)
			return
		}
		fileInfo.Peers = []string{}

		defer uploadFile.Close()

		os.MkdirAll(fileInfo.Path, 0775)

		outPath = fileInfo.Path + "/" + fileInfo.Name

		sum := ""

		// If the file already exists locally, reject the push unless its
		// checksum matches the incoming metadata.
		if this.util.FileExists(outPath) {
			if tmpFile, err = os.Open(outPath); err != nil {
				log.Error(err)
				w.Write([]byte(err.Error()))
				return
			}
			sum = this.util.GetFileSum(tmpFile, Config().FileSumArithmetic)
			if sum != fileInfo.Md5 {
				tmpFile.Close()
				log.Error("md5 !=fileInfo.Md5 ")
				w.Write([]byte("md5 !=fileInfo.Md5 "))
				return
			}
			// NOTE(review): on the match path this os.Open handle is never
			// closed before tmpFile is reassigned by os.Create below —
			// looks like a file-handle leak; confirm.
		}

		// (Re)create the file and stream the upload into it.
		if tmpFile, err = os.Create(outPath); err != nil {
			log.Error(err)
			w.Write([]byte(err.Error()))
			return
		}

		defer tmpFile.Close()

		if _, err = io.Copy(tmpFile, uploadFile); err != nil {
			w.Write([]byte(err.Error()))
			log.Error(err)

			return
		}

		// Verify the bytes just written; remove the file on mismatch.
		// NOTE(review): tmpFile's offset is at EOF here — this relies on
		// util.GetFileSum seeking/reopening internally; confirm.
		sum = this.util.GetFileSum(tmpFile, Config().FileSumArithmetic)
		if sum != fileInfo.Md5 {
			log.Error("md5 error")
			w.Write([]byte("md5 error"))
			tmpFile.Close()
			os.Remove(outPath)

			return

		}

		// Peers was set to []string{} above, so this branch is effectively
		// dead; kept as-is (behavior unchanged).
		if fileInfo.Peers == nil {
			fileInfo.Peers = []string{fmt.Sprintf("http://%s", r.Host)}
		}
		// Index the file twice: by hashed path and by content md5.
		if _, err = this.SaveFileInfoToLevelDB(this.util.MD5(outPath), &fileInfo); err != nil {
			log.Error(err)
		}
		if _, err = this.SaveFileInfoToLevelDB(fileInfo.Md5, &fileInfo); err != nil {
			log.Error(err)
		}
		if this.util.IsExist(outPath) {
			this.SaveFileMd5Log(&fileInfo, CONST_FILE_Md5_FILE_NAME)
		}

		// Reply with the public download URL (store prefix stripped).
		p := strings.Replace(fileInfo.Path, STORE_DIR+"/", "", 1)

		downloadUrl := fmt.Sprintf("http://%s/%s", r.Host, Config().Group+"/"+p+"/"+fileInfo.Name)

		w.Write([]byte(downloadUrl))

	}

}
|
|
|
|
|
|
2019-01-08 10:23:13 +08:00
|
|
|
|
func (this *Server) CheckScene(scene string) (bool, error) {
|
|
|
|
|
|
|
|
|
|
if len(Config().Scenes) == 0 {
|
|
|
|
|
return true, nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if !this.util.Contains(scene, Config().Scenes) {
|
|
|
|
|
return false, errors.New("not valid scene")
|
|
|
|
|
}
|
|
|
|
|
return true, nil
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-08 16:53:03 +08:00
|
|
|
|
// RemoveFile deletes a file identified by its md5 (form value "md5"): it looks
// the FileInfo up in leveldb, removes the leveldb entry and the file on disk,
// appends a removal record to the md5 log, and writes a JSON status result.
func (this *Server) RemoveFile(w http.ResponseWriter, r *http.Request) {
	var (
		err      error
		md5sum   string
		fileInfo *FileInfo
		fpath    string
		delUrl   string
		result   JsonResult
		inner    string
	)
	// delUrl/inner are kept for the commented-out peer-propagation code below.
	_ = delUrl
	_ = inner
	r.ParseForm()
	md5sum = r.FormValue("md5")
	inner = r.FormValue("inner")
	result.Status = "fail"

	// An md5 hex digest is at least 32 characters.
	if len(md5sum) < 32 {
		result.Message = "md5 unvalid"
		w.Write([]byte(this.util.JsonEncodePretty(result)))
		return
	}
	if fileInfo, err = this.GetFileInfoFromLevelDB(md5sum); err != nil {
		w.Write([]byte(err.Error()))
		return
	}

	// The on-disk name is ReName when the file was renamed at upload time.
	if fileInfo.ReName != "" {
		fpath = fileInfo.Path + "/" + fileInfo.ReName
	} else {
		fpath = fileInfo.Path + "/" + fileInfo.Name
	}

	if fileInfo.Path != "" && this.util.FileExists(fpath) {
		// NOTE(review): the leveldb entry is removed before os.Remove; if
		// the disk delete fails, the index entry is already gone — confirm
		// this ordering is intended.
		this.ldb.Delete([]byte(fileInfo.Md5), nil)
		if err = os.Remove(fpath); err != nil {
			w.Write([]byte(err.Error()))
			return
		} else {
			//if inner!="1" {
			//	for _, peer := range Config().Peers {
			//		delUrl = fmt.Sprintf("%s%s", peer, this.getRequestURI("delete"))
			//		req := httplib.Post(delUrl)
			//		req.Param("md5", fileInfo.Md5)
			//		req.Param("inner", "1")
			//		req.SetTimeout(time.Second*5, time.Second*10)
			//		if _, err = req.String(); err != nil {
			//			log.Error(err)
			//		}
			//	}
			//}

			// Record the removal so peers can replay it from the log.
			this.SaveFileMd5Log(fileInfo, CONST_REMOME_Md5_FILE_NAME)
			result.Message = "remove success"
			result.Status = "ok"
			w.Write([]byte(this.util.JsonEncodePretty(result)))
			return
		}
	}
	// Path empty or file already missing on disk.
	result.Message = "fail remove"
	w.Write([]byte(this.util.JsonEncodePretty(result)))

}
|
|
|
|
|
|
2019-02-11 16:02:34 +08:00
|
|
|
|
func (this *Server) getRequestURI(action string) string {
|
|
|
|
|
var (
|
|
|
|
|
uri string
|
|
|
|
|
)
|
|
|
|
|
if Config().SupportGroupManage {
|
2019-02-12 12:55:55 +08:00
|
|
|
|
uri = "/" + Config().Group + "/" + action
|
2019-02-11 16:02:34 +08:00
|
|
|
|
} else {
|
|
|
|
|
uri = "/" + action
|
|
|
|
|
}
|
|
|
|
|
return uri
|
|
|
|
|
}
|
|
|
|
|
|
2018-05-10 18:19:04 +08:00
|
|
|
|
// Upload handles a client multipart upload (POST): it saves the file under
// STORE_DIR (scene/date/peer-id layout, optional custom path, optional
// rename), deduplicates by md5 against leveldb, replicates to peers, and
// replies with either a plain download URL or a JSON FileResult ("output").
func (this *Server) Upload(w http.ResponseWriter, r *http.Request) {
	var (
		err error

		// pathname string
		outname      string
		md5sum       string
		fileInfo     FileInfo
		uploadFile   multipart.File
		uploadHeader *multipart.FileHeader
		scene        string
		output       string
		fileResult   FileResult
		data         []byte
		domain       string
	)

	if r.Method == "POST" {
		// name := r.PostFormValue("name")

		// fileInfo.Path = r.Header.Get("Sync-Path")

		// Uploads must arrive on a cluster-routable host, never loopback,
		// so the recorded peer URLs are reachable by other peers.
		if strings.Contains(r.Host, "127.0.0.1") {
			w.Write([]byte( "(error) upload use clust ip(peers ip),not 127.0.0.1"))
			return
		}

		// Optional client-chosen subdirectory (relative, slashes trimmed).
		if Config().EnableCustomPath {
			fileInfo.Path = r.FormValue("path")
			fileInfo.Path = strings.Trim(fileInfo.Path, "/")
		}
		scene = r.FormValue("scene")
		if scene == "" {
			//Just for Compatibility
			scene = r.FormValue("scenes")
		}
		// Client-claimed md5 (verified after saving) and response format.
		md5sum = r.FormValue("md5")
		output = r.FormValue("output")

		fileInfo.Md5 = md5sum
		if uploadFile, uploadHeader, err = r.FormFile("file"); err != nil {
			log.Error(err)
			w.Write([]byte(err.Error()))
			return
		}
		fileInfo.Peers = []string{}
		fileInfo.TimeStamp = time.Now().Unix()

		if scene == "" {
			scene = Config().DefaultScene
		}

		// Response format defaults to plain text.
		if output == "" {
			output = "text"
		}

		if !this.util.Contains(output, []string{"json", "text"}) {
			w.Write([]byte("output just support json or text"))
			return
		}

		fileInfo.Scene = scene

		if _, err = this.CheckScene(scene); err != nil {
			w.Write([]byte(err.Error()))
			return
		}

		// Domain reported back in JSON results (configured or request host).
		if Config().DownloadDomain != "" {
			domain = fmt.Sprintf("http://%s", Config().DownloadDomain)
		} else {
			domain = fmt.Sprintf("http://%s", r.Host)
		}

		// err was already handled above; kept as-is (behavior unchanged).
		if err != nil {
			log.Error(err)
			http.Redirect(w, r, "/", http.StatusMovedPermanently)
			return
		}

		// SaveUploadFile streams the multipart file to disk, choosing the
		// target folder/name, and fills in fileInfo (Name/ReName/Size/Md5/
		// Path/Peers). Returns the populated fileInfo or an error.
		SaveUploadFile := func(file multipart.File, header *multipart.FileHeader, fileInfo *FileInfo) (*FileInfo, error) {
			var (
				err     error
				outFile *os.File
				folder  string
				fi      os.FileInfo
			)

			defer file.Close()

			fileInfo.Name = header.Filename

			// Renaming replaces the client filename with a UUID-derived
			// name, keeping the original extension.
			if Config().RenameFile {
				fileInfo.ReName = this.util.MD5(this.util.GetUUID()) + path.Ext(fileInfo.Name)
			}

			// Default layout: <scene>/<yyyymmdd/hh/mm>[/<peer-id>].
			folder = time.Now().Format("20060102/15/04")
			if Config().PeerId != "" {
				folder = fmt.Sprintf(folder+"/%s", Config().PeerId)
			}
			if fileInfo.Scene != "" {
				folder = fmt.Sprintf(STORE_DIR+"/%s/%s", fileInfo.Scene, folder)
			} else {
				folder = fmt.Sprintf(STORE_DIR+"/%s", folder)
			}
			// A custom path overrides the date layout entirely.
			if fileInfo.Path != "" {
				if strings.HasPrefix(fileInfo.Path, STORE_DIR) {
					folder = fileInfo.Path
				} else {

					folder = STORE_DIR + "/" + fileInfo.Path
				}
			}

			if !this.util.FileExists(folder) {
				os.MkdirAll(folder, 0775)
			}

			outPath := fmt.Sprintf(folder+"/%s", fileInfo.Name)
			if Config().RenameFile {
				outPath = fmt.Sprintf(folder+"/%s", fileInfo.ReName)
			}

			// On name collision, probe "0_name", "1_name", ... until free.
			if this.util.FileExists(outPath) {
				for i := 0; i < 10000; i++ {
					outPath = fmt.Sprintf(folder+"/%d_%s", i, header.Filename)
					fileInfo.Name = fmt.Sprintf("%d_%s", i, header.Filename)
					if !this.util.FileExists(outPath) {
						break
					}
				}
			}

			log.Info(fmt.Sprintf("upload: %s", outPath))

			if outFile, err = os.Create(outPath); err != nil {
				return fileInfo, err
			}

			defer outFile.Close()

			// err is nil here after the Create check; kept as-is.
			if err != nil {
				log.Error(err)
				return fileInfo, errors.New("(error)fail," + err.Error())

			}

			if _, err = io.Copy(outFile, file); err != nil {
				log.Error(err)
				return fileInfo, errors.New("(error)fail," + err.Error())
			}

			if fi, err = outFile.Stat(); err != nil {
				log.Error(err)
			} else {
				fileInfo.Size = fi.Size()
			}
			// Checksum the written file; replaces the client-claimed md5.
			// NOTE(review): outFile's offset is at EOF — relies on
			// util.GetFileSum seeking internally; confirm.
			v := this.util.GetFileSum(outFile, Config().FileSumArithmetic)

			fileInfo.Md5 = v
			fileInfo.Path = folder

			fileInfo.Peers = append(fileInfo.Peers, fmt.Sprintf("http://%s", r.Host))

			return fileInfo, nil

		}

		// NOTE(review): the error return of SaveUploadFile is discarded;
		// a failed save falls through to the md5=="" guard below.
		SaveUploadFile(uploadFile, uploadHeader, &fileInfo)

		// Dedup: if this md5 is already known, drop the fresh copy and
		// answer with the existing file's URL.
		if v, _ := this.GetFileInfoFromLevelDB(fileInfo.Md5); v != nil && v.Md5 != "" {

			if Config().RenameFile {
				os.Remove(fileInfo.Path + "/" + fileInfo.ReName)
			} else {
				os.Remove(fileInfo.Path + "/" + fileInfo.Name)
			}
			outname = v.Name
			if v.ReName != "" {
				outname = v.ReName
			}
			p := strings.Replace(v.Path, STORE_DIR+"/", "", 1)
			p = Config().Group + "/" + p + "/" + outname
			downloadUrl := fmt.Sprintf("http://%s/%s", r.Host, p)
			if Config().DownloadDomain != "" {
				downloadUrl = fmt.Sprintf("http://%s/%s", Config().DownloadDomain, p)
			}
			if output == "json" {
				fileResult.Url = downloadUrl
				fileResult.Md5 = v.Md5
				fileResult.Path = "/" + p
				fileResult.Domain = domain
				fileResult.Scene = fileInfo.Scene

				// Just for Compatibility
				fileResult.Src = fileResult.Path
				fileResult.Scenes = fileInfo.Scene

				if data, err = json.Marshal(fileResult); err != nil {
					w.Write([]byte(err.Error()))
					return
				}
				w.Write(data)

			} else {

				w.Write([]byte(downloadUrl))
			}
			return
		}

		// An empty md5 here means SaveUploadFile failed to checksum.
		if fileInfo.Md5 == "" {
			log.Warn(" fileInfo.Md5 is null")
			return
		}

		// If the client claimed an md5, it must match the computed one.
		if md5sum != "" && fileInfo.Md5 != md5sum {
			log.Warn(" fileInfo.Md5 and md5sum !=")
			return
		}

		// Replicate the new file to peers in the background.
		go this.postFileToPeer(&fileInfo)

		outname = fileInfo.Name

		if Config().RenameFile {
			outname = fileInfo.ReName
		}

		if fi, err := os.Stat(fileInfo.Path + "/" + outname); err != nil {
			log.Error(err)
		} else {
			fileInfo.Size = fi.Size()
			this.SaveFileMd5Log(&fileInfo, CONST_FILE_Md5_FILE_NAME)
		}

		// Build the public URL (store prefix stripped, group prefixed).
		p := strings.Replace(fileInfo.Path, STORE_DIR+"/", "", 1)
		p = Config().Group + "/" + p + "/" + outname
		downloadUrl := fmt.Sprintf("http://%s/%s", r.Host, p)
		if Config().DownloadDomain != "" {
			downloadUrl = fmt.Sprintf("http://%s/%s", Config().DownloadDomain, p)
		}

		if output == "json" {
			fileResult.Url = downloadUrl
			fileResult.Md5 = fileInfo.Md5
			fileResult.Path = "/" + p
			fileResult.Domain = domain
			fileResult.Scene = fileInfo.Scene
			// Just for Compatibility
			fileResult.Src = fileResult.Path
			fileResult.Scenes = fileInfo.Scene

			if data, err = json.Marshal(fileResult); err != nil {
				w.Write([]byte(err.Error()))
				return
			}
			w.Write(data)

		} else {

			w.Write([]byte(downloadUrl))
		}
		return

	} else {
		w.Write([]byte("(error)fail,please use post method"))
		return
	}

}
|
2018-05-10 13:31:34 +08:00
|
|
|
|
|
2019-01-09 12:05:20 +08:00
|
|
|
|
func (this *Server) SendToMail(to, subject, body, mailtype string) error {
|
|
|
|
|
host := Config().Mail.Host
|
|
|
|
|
user := Config().Mail.User
|
|
|
|
|
password := Config().Mail.Password
|
|
|
|
|
hp := strings.Split(host, ":")
|
|
|
|
|
auth := smtp.PlainAuth("", user, password, hp[0])
|
2019-02-01 11:31:19 +08:00
|
|
|
|
var contentType string
|
2019-01-09 12:05:20 +08:00
|
|
|
|
if mailtype == "html" {
|
2019-02-01 11:31:19 +08:00
|
|
|
|
contentType = "Content-Type: text/" + mailtype + "; charset=UTF-8"
|
2019-01-09 12:05:20 +08:00
|
|
|
|
} else {
|
2019-02-01 11:31:19 +08:00
|
|
|
|
contentType = "Content-Type: text/plain" + "; charset=UTF-8"
|
2019-01-09 12:05:20 +08:00
|
|
|
|
}
|
|
|
|
|
|
2019-02-01 11:31:19 +08:00
|
|
|
|
msg := []byte("To: " + to + "\r\nFrom: " + user + ">\r\nSubject: " + "\r\n" + contentType + "\r\n\r\n" + body)
|
|
|
|
|
sendTo := strings.Split(to, ";")
|
|
|
|
|
err := smtp.SendMail(host, auth, user, sendTo, msg)
|
2019-01-09 12:05:20 +08:00
|
|
|
|
return err
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-04 10:22:03 +08:00
|
|
|
|
func (this *Server) BenchMark(w http.ResponseWriter, r *http.Request) {
|
|
|
|
|
t := time.Now()
|
|
|
|
|
batch := new(leveldb.Batch)
|
|
|
|
|
|
|
|
|
|
for i := 0; i < 100000000; i++ {
|
|
|
|
|
f := FileInfo{}
|
|
|
|
|
f.Peers = []string{"http://192.168.0.1", "http://192.168.2.5"}
|
|
|
|
|
f.Path = "20190201/19/02"
|
|
|
|
|
s := strconv.Itoa(i)
|
2019-01-18 10:32:25 +08:00
|
|
|
|
s = this.util.MD5(s)
|
2019-01-04 10:22:03 +08:00
|
|
|
|
f.Name = s
|
|
|
|
|
f.Md5 = s
|
|
|
|
|
|
|
|
|
|
// server.SaveFileInfoToLevelDB(s, &f)
|
|
|
|
|
|
|
|
|
|
if data, err := json.Marshal(&f); err == nil {
|
|
|
|
|
batch.Put([]byte(s), data)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if i%10000 == 0 {
|
|
|
|
|
|
|
|
|
|
if batch.Len() > 0 {
|
2019-01-16 10:28:51 +08:00
|
|
|
|
server.ldb.Write(batch, nil)
|
2019-01-04 10:22:03 +08:00
|
|
|
|
// batch = new(leveldb.Batch)
|
|
|
|
|
batch.Reset()
|
|
|
|
|
}
|
|
|
|
|
fmt.Println(i, time.Since(t).Seconds())
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-20 11:05:22 +08:00
|
|
|
|
//fmt.Println(server.GetFileInfoFromLevelDB(s))
|
2019-01-04 10:22:03 +08:00
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-18 10:32:25 +08:00
|
|
|
|
this.util.WriteFile("time.txt", time.Since(t).String())
|
2019-01-04 10:22:03 +08:00
|
|
|
|
fmt.Println(time.Since(t).String())
|
|
|
|
|
}
|
2019-01-16 15:30:53 +08:00
|
|
|
|
|
|
|
|
|
func (this *Server) RepairStatWeb(w http.ResponseWriter, r *http.Request) {
|
|
|
|
|
|
2019-02-12 12:55:55 +08:00
|
|
|
|
var (
|
|
|
|
|
result JsonResult
|
|
|
|
|
)
|
2019-01-16 15:30:53 +08:00
|
|
|
|
this.RepairStat()
|
2019-02-12 12:55:55 +08:00
|
|
|
|
result.Status = "ok"
|
|
|
|
|
w.Write([]byte(this.util.JsonEncodePretty(result)))
|
2019-01-16 15:30:53 +08:00
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-02 17:46:30 +08:00
|
|
|
|
func (this *Server) Stat(w http.ResponseWriter, r *http.Request) {
|
2019-02-12 12:55:55 +08:00
|
|
|
|
var (
|
|
|
|
|
result JsonResult
|
2019-02-12 18:35:26 +08:00
|
|
|
|
inner string
|
2019-02-12 12:55:55 +08:00
|
|
|
|
)
|
2019-02-12 18:35:26 +08:00
|
|
|
|
r.ParseForm()
|
|
|
|
|
inner=r.FormValue("inner")
|
|
|
|
|
data := this.GetStat()
|
2019-02-12 12:55:55 +08:00
|
|
|
|
result.Status = "ok"
|
|
|
|
|
result.Data = data
|
2019-02-12 18:35:26 +08:00
|
|
|
|
if inner=="1" {
|
|
|
|
|
w.Write([]byte(this.util.JsonEncodePretty(data)))
|
|
|
|
|
} else {
|
|
|
|
|
w.Write([]byte(this.util.JsonEncodePretty(result)))
|
|
|
|
|
}
|
2019-01-17 21:46:29 +08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// GetStat assembles per-date file statistics from this.statMap. Keys of the
// form "<yyyymmdd>_<stat-key>" determine the date range; one row is produced
// per date that has a total-size entry, plus a final "all" summary row.
func (this *Server) GetStat() []StatDateFileInfo {
	var (
		min  int64
		max  int64
		err  error
		i    int64
		rows []StatDateFileInfo
	)
	// Seed the range with a baseline date, then widen it from the keys seen.
	min = 20190101
	max = 20190101
	for k := range this.statMap.Get() {
		ks := strings.Split(k, "_")
		if len(ks) == 2 {
			if i, err = strconv.ParseInt(ks[0], 10, 64); err != nil {
				continue
			}
			if i >= max {
				max = i
			}
			if i < min {
				min = i
			}
		}
	}
	// NOTE(review): this iterates every integer between min and max, most of
	// which are not valid dates; harmless (misses get no row) but wasteful
	// for wide ranges.
	for i := min; i <= max; i++ {
		s := fmt.Sprintf("%d", i)
		if v, ok := this.statMap.GetValue(s + "_" + CONST_STAT_FILE_TOTAL_SIZE_KEY); ok {
			var info StatDateFileInfo
			info.Date = s
			// Only int64 values are counted; other types leave zero.
			switch v.(type) {
			case int64:
				info.TotalSize = v.(int64)
			}

			if v, ok := this.statMap.GetValue(s + "_" + CONST_STAT_FILE_COUNT_KEY); ok {
				switch v.(type) {
				case int64:
					info.FileCount = v.(int64)
				}
			}

			rows = append(rows, info)
		}
	}

	// Overall totals across all dates.
	if v, ok := this.statMap.GetValue(CONST_STAT_FILE_COUNT_KEY); ok {
		var info StatDateFileInfo
		info.Date = "all"
		// NOTE(review): unchecked type assertion — panics if the stored
		// value is ever not an int64; the per-date loop above type-switches
		// instead. Confirm the writer always stores int64 here.
		info.FileCount = v.(int64)
		if v, ok := this.statMap.GetValue(CONST_STAT_FILE_TOTAL_SIZE_KEY); ok {
			info.TotalSize = v.(int64)
		}
		rows = append(rows, info)
	}

	return rows
}
|
|
|
|
|
|
|
|
|
|
func (this *Server) RegisterExit() {
|
|
|
|
|
c := make(chan os.Signal)
|
|
|
|
|
signal.Notify(c, syscall.SIGHUP, syscall.SIGINT, syscall.SIGTERM, syscall.SIGQUIT)
|
|
|
|
|
go func() {
|
|
|
|
|
for s := range c {
|
|
|
|
|
switch s {
|
|
|
|
|
case syscall.SIGHUP, syscall.SIGINT, syscall.SIGTERM, syscall.SIGQUIT:
|
2019-01-16 10:28:51 +08:00
|
|
|
|
this.ldb.Close()
|
2019-01-09 12:05:20 +08:00
|
|
|
|
log.Info("Exit", s)
|
|
|
|
|
os.Exit(1)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}()
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-20 23:45:16 +08:00
|
|
|
|
// AppendToQueue enqueues a file for replication to peers; the entries are
// drained by Consumer, which posts each file to its peers.
// The FileInfo is copied into the channel, so the caller's pointer is not
// retained. Blocks when the queue channel is full.
func (this *Server) AppendToQueue(fileInfo *FileInfo) {
	this.queueToPeers <- *fileInfo
}
|
2019-01-20 23:45:16 +08:00
|
|
|
|
|
2019-01-21 15:07:42 +08:00
|
|
|
|
// AppendToDownloadQueue enqueues a file to be fetched from a peer; the
// entries are drained by ConsumerDownLoad. The FileInfo is copied into
// the channel. Blocks when the queue channel is full.
func (this *Server) AppendToDownloadQueue(fileInfo *FileInfo) {
	this.queueFromPeers <- *fileInfo
}
|
|
|
|
|
|
|
|
|
|
func (this *Server) ConsumerDownLoad() {
|
|
|
|
|
|
|
|
|
|
ConsumerFunc := func() {
|
|
|
|
|
|
|
|
|
|
for {
|
|
|
|
|
fileInfo := <-this.queueFromPeers
|
|
|
|
|
if len(fileInfo.Peers) <= 0 {
|
2019-02-11 17:39:00 +08:00
|
|
|
|
log.Warn("Peer is null", fileInfo)
|
2019-01-21 15:07:42 +08:00
|
|
|
|
continue
|
|
|
|
|
}
|
|
|
|
|
for _, peer := range fileInfo.Peers {
|
2019-01-28 17:01:26 +08:00
|
|
|
|
if strings.Contains(peer, "127.0.0.1") {
|
2019-02-01 11:31:19 +08:00
|
|
|
|
log.Warn("sync error with 127.0.0.1", fileInfo)
|
2019-01-28 17:01:26 +08:00
|
|
|
|
continue
|
|
|
|
|
}
|
2019-01-21 16:58:16 +08:00
|
|
|
|
if peer != this.host {
|
2019-01-28 17:01:26 +08:00
|
|
|
|
|
2019-01-21 15:07:42 +08:00
|
|
|
|
this.DownloadFromPeer(peer, &fileInfo)
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for i := 0; i < 50; i++ {
|
|
|
|
|
|
|
|
|
|
go ConsumerFunc()
|
|
|
|
|
|
|
|
|
|
}
|
2019-01-20 23:45:16 +08:00
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-17 01:05:27 +08:00
|
|
|
|
func (this *Server) Consumer() {
|
|
|
|
|
|
|
|
|
|
ConsumerFunc := func() {
|
|
|
|
|
|
|
|
|
|
for {
|
2019-01-18 10:32:25 +08:00
|
|
|
|
fileInfo := <-this.queueToPeers
|
2019-01-18 22:49:09 +08:00
|
|
|
|
this.postFileToPeer(&fileInfo)
|
2019-01-17 01:05:27 +08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for i := 0; i < 50; i++ {
|
|
|
|
|
|
|
|
|
|
go ConsumerFunc()
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-02-01 11:31:19 +08:00
|
|
|
|
// AutoRepair reconciles this node's file set with every configured peer.
//
// For each peer it fetches per-date stats; when a date's local file count
// differs from the peer's (or forceRepair is set) it diffs the md5 sets:
// md5s the peer is missing are pushed to it via receive_md5s, and md5s we
// are missing are queued locally for download. Dates with no local counter
// at all fall back to a full "sync" pull from the peer.
//
// Guarded by the "AutoRepair" lock so only one repair runs at a time.
func (this *Server) AutoRepair(forceRepair bool) {
	if this.lockMap.IsLock("AutoRepair") {
		log.Warn("Lock AutoRepair")
		return
	}
	this.lockMap.LockKey("AutoRepair")
	defer this.lockMap.UnLockKey("AutoRepair")
	AutoRepairFunc := func(forceRepair bool) {
		var (
			dateStats []StatDateFileInfo
			err       error
			countKey  string
			md5s      string
			localSet  mapset.Set
			remoteSet mapset.Set
			allSet    mapset.Set
			tmpSet    mapset.Set
			fileInfo  *FileInfo
		)
		// Repair touches the network and leveldb; never let a panic
		// escape and kill the caller.
		defer func() {
			if re := recover(); re != nil {
				buffer := debug.Stack()
				log.Error("AutoRepair")
				log.Error(re)
				log.Error(string(buffer))
			}
		}()
		Update := func(peer string, dateStat StatDateFileInfo) { // pull that date's data from the remote peer
			req := httplib.Get(fmt.Sprintf("%s%s?date=%s&force=%s", peer, this.getRequestURI("sync"), dateStat.Date, "1"))
			req.SetTimeout(time.Second*5, time.Second*5)
			if _, err = req.String(); err != nil {
				log.Error(err)
			}
			log.Info(fmt.Sprintf("syn file from %s date %s", peer, dateStat.Date))
		}
		for _, peer := range Config().Peers {
			// Ask the peer for its per-date stats (inner=1 -> raw rows).
			req := httplib.Post(fmt.Sprintf("%s%s", peer, this.getRequestURI("stat")))
			req.Param("inner", "1")
			req.SetTimeout(time.Second*5, time.Second*15)
			if err = req.ToJSON(&dateStats); err != nil {
				log.Error(err)
				continue
			}
			for _, dateStat := range dateStats {
				if dateStat.Date == "all" {
					continue // summary row, not a real date
				}
				countKey = dateStat.Date + "_" + CONST_STAT_FILE_COUNT_KEY
				if v, ok := this.statMap.GetValue(countKey); ok {
					switch v.(type) {
					case int64:
						if v.(int64) != dateStat.FileCount || forceRepair { // counts differ: diff the md5 sets
							//TODO
							req := httplib.Post(fmt.Sprintf("%s%s", peer, this.getRequestURI("get_md5s_by_date")))
							req.SetTimeout(time.Second*5, time.Second*20)
							req.Param("date", dateStat.Date)
							if md5s, err = req.String(); err != nil {
								continue
							}
							if localSet, err = this.GetMd5sByDate(dateStat.Date, CONST_FILE_Md5_FILE_NAME); err != nil {
								log.Error(err)
								continue
							}
							remoteSet = this.util.StrToMapSet(md5s, ",")
							allSet = localSet.Union(remoteSet)
							// md5s the peer is missing -> push to peer.
							md5s = this.util.MapSetToStr(allSet.Difference(localSet), ",")
							req = httplib.Post(fmt.Sprintf("%s%s", peer, this.getRequestURI("receive_md5s")))
							req.SetTimeout(time.Second*5, time.Second*15)
							req.Param("md5s", md5s)
							req.String()
							// md5s we are missing -> queue for replication.
							tmpSet = allSet.Difference(remoteSet)
							for v := range tmpSet.Iter() {
								if v != nil {
									if fileInfo, err = this.GetFileInfoFromLevelDB(v.(string)); err != nil {
										log.Error(err)
										continue
									}
									this.AppendToQueue(fileInfo)
								}
							}
							//Update(peer,dateStat)
						}
					}
				} else {
					// No local counter for this date: full pull.
					Update(peer, dateStat)
				}
			}
		}
	}
	AutoRepairFunc(forceRepair)
}
|
|
|
|
|
|
2019-01-28 17:01:26 +08:00
|
|
|
|
func (this *Server) CleanMd5SumCache() {
|
|
|
|
|
|
|
|
|
|
Clean := func() {
|
|
|
|
|
|
|
|
|
|
defer func() {
|
|
|
|
|
if re := recover(); re != nil {
|
|
|
|
|
buffer := debug.Stack()
|
|
|
|
|
log.Error("Check")
|
|
|
|
|
log.Error(re)
|
|
|
|
|
log.Error(string(buffer))
|
|
|
|
|
}
|
|
|
|
|
}()
|
|
|
|
|
|
|
|
|
|
var (
|
|
|
|
|
today string
|
|
|
|
|
memstat *runtime.MemStats
|
|
|
|
|
keys []string
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
memstat = new(runtime.MemStats)
|
|
|
|
|
|
|
|
|
|
runtime.ReadMemStats(memstat)
|
|
|
|
|
|
|
|
|
|
_ = memstat
|
|
|
|
|
|
|
|
|
|
today = this.util.GetToDay()
|
|
|
|
|
|
|
|
|
|
_ = today
|
|
|
|
|
|
|
|
|
|
keys = this.sumMap.Keys()
|
|
|
|
|
|
|
|
|
|
for _, k := range keys {
|
|
|
|
|
if strings.HasPrefix(k, today) {
|
|
|
|
|
continue
|
|
|
|
|
}
|
|
|
|
|
if v, ok := this.sumMap.GetValue(k); ok {
|
|
|
|
|
v.(mapset.Set).Clear()
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
go func() {
|
|
|
|
|
for {
|
|
|
|
|
time.Sleep(time.Minute * 10)
|
|
|
|
|
Clean()
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
}()
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-09 12:05:20 +08:00
|
|
|
|
func (this *Server) Check() {
|
|
|
|
|
|
2019-01-09 13:13:21 +08:00
|
|
|
|
check := func() {
|
|
|
|
|
|
|
|
|
|
defer func() {
|
|
|
|
|
if re := recover(); re != nil {
|
|
|
|
|
buffer := debug.Stack()
|
2019-01-18 19:05:18 +08:00
|
|
|
|
log.Error("Check")
|
2019-01-09 13:13:21 +08:00
|
|
|
|
log.Error(re)
|
|
|
|
|
log.Error(string(buffer))
|
|
|
|
|
}
|
|
|
|
|
}()
|
|
|
|
|
|
|
|
|
|
var (
|
2019-02-12 12:55:55 +08:00
|
|
|
|
status JsonResult
|
2019-01-09 13:13:21 +08:00
|
|
|
|
err error
|
|
|
|
|
subject string
|
|
|
|
|
body string
|
2019-01-09 14:48:15 +08:00
|
|
|
|
req *httplib.BeegoHTTPRequest
|
2019-01-09 13:13:21 +08:00
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
for _, peer := range Config().Peers {
|
|
|
|
|
|
2019-02-12 12:55:55 +08:00
|
|
|
|
req = httplib.Get(fmt.Sprintf("%s%s", peer, this.getRequestURI("status")))
|
2019-01-09 13:13:21 +08:00
|
|
|
|
req.SetTimeout(time.Second*5, time.Second*5)
|
|
|
|
|
err = req.ToJSON(&status)
|
|
|
|
|
|
|
|
|
|
if status.Status != "ok" {
|
|
|
|
|
|
|
|
|
|
for _, to := range Config().AlramReceivers {
|
2019-01-09 14:48:15 +08:00
|
|
|
|
subject = "fastdfs server error"
|
2019-01-09 13:13:21 +08:00
|
|
|
|
|
|
|
|
|
if err != nil {
|
2019-01-09 14:48:15 +08:00
|
|
|
|
body = fmt.Sprintf("%s\nserver:%s\nerror:\n%s", subject, peer, err.Error())
|
|
|
|
|
} else {
|
|
|
|
|
body = fmt.Sprintf("%s\nserver:%s\n", subject, peer)
|
|
|
|
|
}
|
|
|
|
|
if err = this.SendToMail(to, subject, body, "text"); err != nil {
|
|
|
|
|
log.Error(err)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if Config().AlarmUrl != "" {
|
|
|
|
|
req = httplib.Post(Config().AlarmUrl)
|
|
|
|
|
req.SetTimeout(time.Second*10, time.Second*10)
|
|
|
|
|
req.Param("message", body)
|
|
|
|
|
req.Param("subject", subject)
|
|
|
|
|
if _, err = req.String(); err != nil {
|
|
|
|
|
log.Error(err)
|
2019-01-09 13:13:21 +08:00
|
|
|
|
}
|
2019-01-09 14:48:15 +08:00
|
|
|
|
|
2019-01-09 13:13:21 +08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
go func() {
|
|
|
|
|
for {
|
2019-01-09 14:57:43 +08:00
|
|
|
|
time.Sleep(time.Minute * 10)
|
2019-01-09 13:13:21 +08:00
|
|
|
|
check()
|
|
|
|
|
}
|
|
|
|
|
}()
|
|
|
|
|
|
|
|
|
|
}
|
2019-02-12 11:08:19 +08:00
|
|
|
|
func (this *Server) RepairFileInfo(w http.ResponseWriter, r *http.Request) {
|
2019-02-12 12:55:55 +08:00
|
|
|
|
var (
|
|
|
|
|
result JsonResult
|
|
|
|
|
)
|
2019-02-12 11:08:19 +08:00
|
|
|
|
if !this.IsPeer(r) {
|
|
|
|
|
w.Write([]byte(this.GetClusterNotPermitMessage(r)))
|
|
|
|
|
return
|
|
|
|
|
}
|
2019-02-12 12:55:55 +08:00
|
|
|
|
result.Status = "ok"
|
|
|
|
|
result.Message = "repair job start,don't try again"
|
2019-02-12 11:08:19 +08:00
|
|
|
|
go this.RepairFileInfoFromFile()
|
2019-02-12 12:55:55 +08:00
|
|
|
|
w.Write([]byte(this.util.JsonEncodePretty(result)))
|
2019-02-12 11:08:19 +08:00
|
|
|
|
}
|
2019-01-09 13:13:21 +08:00
|
|
|
|
|
2019-01-22 19:26:05 +08:00
|
|
|
|
// Reload is the peer-only configuration management endpoint.
//
// Supported actions (form value "action"):
//   - "get":    return the current in-memory configuration.
//   - "set":    validate the JSON in form value "cfg", pretty-print it and
//     write it to the config file (does NOT apply it to the running server).
//   - "reload": re-read and re-parse the config file, then re-run
//     initComponent so the running server picks the changes up.
//
// An empty action gets a usage hint; any other value falls through silently.
func (this *Server) Reload(w http.ResponseWriter, r *http.Request) {
	var (
		err     error
		data    []byte
		cfg     GloablConfig
		action  string
		cfgjson string
		result  JsonResult
	)
	// Default to "fail"; each successful branch flips it to "ok".
	result.Status = "fail"
	r.ParseForm()
	if !this.IsPeer(r) {
		w.Write([]byte(this.GetClusterNotPermitMessage(r)))
		return
	}
	cfgjson = r.FormValue("cfg")
	action = r.FormValue("action")
	_ = cfgjson
	if action == "get" {
		result.Data = Config()
		result.Status = "ok"
		w.Write([]byte(this.util.JsonEncodePretty(result)))
		return
	}
	if action == "set" {
		if cfgjson == "" {
			result.Message = "(error)parameter cfg(json) require"
			w.Write([]byte(this.util.JsonEncodePretty(result)))
			return
		}
		// Validate before persisting: refuse to write unparseable JSON.
		if err = json.Unmarshal([]byte(cfgjson), &cfg); err != nil {
			log.Error(err)
			result.Message = err.Error()
			w.Write([]byte(this.util.JsonEncodePretty(result)))
			return
		}
		result.Status = "ok"
		// Persist the normalized (pretty-printed) form, not the raw input.
		cfgjson = this.util.JsonEncodePretty(cfg)
		this.util.WriteFile(CONST_CONF_FILE_NAME, cfgjson)
		w.Write([]byte(this.util.JsonEncodePretty(result)))
		return
	}
	if action == "reload" {
		// Sanity-check the file parses before re-initializing the server.
		if data, err = ioutil.ReadFile(CONST_CONF_FILE_NAME); err != nil {
			result.Message = err.Error()
			w.Write([]byte(this.util.JsonEncodePretty(result)))
			return
		}
		if err = json.Unmarshal(data, &cfg); err != nil {
			result.Message = err.Error()
			w.Write([]byte(this.util.JsonEncodePretty(result)))
			return
		}
		ParseConfig(CONST_CONF_FILE_NAME)
		this.initComponent(true)
		result.Status = "ok"
		w.Write([]byte(this.util.JsonEncodePretty(result)))
		return
	}
	if action == "" {
		w.Write([]byte("(error)action support set(json) get reload"))
	}
}
|
|
|
|
|
|
2019-01-19 10:35:42 +08:00
|
|
|
|
func (this *Server) Repair(w http.ResponseWriter, r *http.Request) {
|
|
|
|
|
|
|
|
|
|
var (
|
2019-02-11 16:02:34 +08:00
|
|
|
|
force string
|
2019-02-01 11:31:19 +08:00
|
|
|
|
forceRepair bool
|
2019-02-12 12:55:55 +08:00
|
|
|
|
result JsonResult
|
2019-01-19 10:35:42 +08:00
|
|
|
|
)
|
2019-02-12 12:55:55 +08:00
|
|
|
|
result.Status = "ok"
|
2019-01-19 10:35:42 +08:00
|
|
|
|
r.ParseForm()
|
2019-01-20 11:05:22 +08:00
|
|
|
|
force = r.FormValue("force")
|
|
|
|
|
if force == "1" {
|
2019-02-01 11:31:19 +08:00
|
|
|
|
forceRepair = true
|
2019-01-19 10:35:42 +08:00
|
|
|
|
}
|
|
|
|
|
if this.IsPeer(r) {
|
2019-02-01 11:31:19 +08:00
|
|
|
|
go this.AutoRepair(forceRepair)
|
2019-02-12 12:55:55 +08:00
|
|
|
|
result.Message = "repair job start..."
|
|
|
|
|
w.Write([]byte(this.util.JsonEncodePretty(result)))
|
2019-01-19 10:35:42 +08:00
|
|
|
|
} else {
|
2019-02-12 12:55:55 +08:00
|
|
|
|
result.Message = this.GetClusterNotPermitMessage(r)
|
|
|
|
|
w.Write([]byte(this.util.JsonEncodePretty(result)))
|
2019-01-19 10:35:42 +08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
2019-01-18 22:49:09 +08:00
|
|
|
|
|
2019-01-09 13:13:21 +08:00
|
|
|
|
func (this *Server) Status(w http.ResponseWriter, r *http.Request) {
|
|
|
|
|
|
|
|
|
|
var (
|
2019-02-12 12:55:55 +08:00
|
|
|
|
status JsonResult
|
2019-01-09 13:13:21 +08:00
|
|
|
|
err error
|
|
|
|
|
data []byte
|
2019-01-21 16:58:16 +08:00
|
|
|
|
sts map[string]interface{}
|
2019-01-28 17:01:26 +08:00
|
|
|
|
today string
|
|
|
|
|
sumset mapset.Set
|
|
|
|
|
ok bool
|
|
|
|
|
v interface{}
|
2019-01-09 13:13:21 +08:00
|
|
|
|
)
|
2019-01-21 16:58:16 +08:00
|
|
|
|
memStat := new(runtime.MemStats)
|
2019-01-28 17:01:26 +08:00
|
|
|
|
runtime.ReadMemStats(memStat)
|
|
|
|
|
today = this.util.GetToDay()
|
2019-01-21 16:58:16 +08:00
|
|
|
|
|
|
|
|
|
sts = make(map[string]interface{})
|
|
|
|
|
sts["Fs.QueueFromPeers"] = len(this.queueFromPeers)
|
|
|
|
|
sts["Fs.QueueToPeers"] = len(this.queueToPeers)
|
2019-01-28 17:01:26 +08:00
|
|
|
|
for _, k := range []string{CONST_FILE_Md5_FILE_NAME, CONST_Md5_ERROR_FILE_NAME, CONST_Md5_QUEUE_FILE_NAME} {
|
|
|
|
|
k2 := fmt.Sprintf("%s_%s", today, k)
|
|
|
|
|
if v, ok = this.sumMap.GetValue(k2); ok {
|
|
|
|
|
sumset = v.(mapset.Set)
|
|
|
|
|
if k == CONST_Md5_QUEUE_FILE_NAME {
|
|
|
|
|
|
|
|
|
|
sts["Fs.QueueSetSize"] = sumset.Cardinality()
|
|
|
|
|
}
|
|
|
|
|
if k == CONST_Md5_ERROR_FILE_NAME {
|
|
|
|
|
sts["Fs.ErrorSetSize"] = sumset.Cardinality()
|
|
|
|
|
}
|
|
|
|
|
if k == CONST_FILE_Md5_FILE_NAME {
|
|
|
|
|
sts["Fs.FileSetSize"] = sumset.Cardinality()
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-21 16:58:16 +08:00
|
|
|
|
sts["Fs.AutoRepair"] = Config().AutoRepair
|
|
|
|
|
sts["Fs.RefreshInterval"] = Config().RefreshInterval
|
|
|
|
|
sts["Fs.Peers"] = Config().Peers
|
|
|
|
|
sts["Fs.Local"] = this.host
|
|
|
|
|
sts["Fs.FileStats"] = this.GetStat()
|
|
|
|
|
sts["Fs.ShowDir"] = Config().ShowDir
|
|
|
|
|
sts["Sys.NumGoroutine"] = runtime.NumGoroutine()
|
|
|
|
|
sts["Sys.NumCpu"] = runtime.NumCPU()
|
|
|
|
|
sts["Sys.Alloc"] = memStat.Alloc
|
|
|
|
|
sts["Sys.TotalAlloc"] = memStat.TotalAlloc
|
|
|
|
|
sts["Sys.HeapAlloc"] = memStat.HeapAlloc
|
|
|
|
|
sts["Sys.Frees"] = memStat.Frees
|
|
|
|
|
sts["Sys.HeapObjects"] = memStat.HeapObjects
|
|
|
|
|
sts["Sys.NumGC"] = memStat.NumGC
|
|
|
|
|
sts["Sys.GCCPUFraction"] = memStat.GCCPUFraction
|
|
|
|
|
sts["Sys.GCSys"] = memStat.GCSys
|
2019-01-28 17:01:26 +08:00
|
|
|
|
//sts["Sys.MemInfo"] = memStat
|
2019-01-09 13:13:21 +08:00
|
|
|
|
|
|
|
|
|
status.Status = "ok"
|
2019-01-21 16:58:16 +08:00
|
|
|
|
status.Data = sts
|
|
|
|
|
|
|
|
|
|
w.Write([]byte(this.util.JsonEncodePretty(status)))
|
|
|
|
|
return
|
2019-01-09 13:13:21 +08:00
|
|
|
|
|
|
|
|
|
if data, err = json.Marshal(&status); err != nil {
|
|
|
|
|
status.Status = "fail"
|
|
|
|
|
status.Message = err.Error()
|
|
|
|
|
w.Write(data)
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
w.Write(data)
|
|
|
|
|
|
2019-01-09 12:05:20 +08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// HeartBeat is a liveness probe endpoint. The empty body is intentional:
// it answers with an empty 200 response, which is all a caller needs to
// confirm the server is reachable.
func (this *Server) HeartBeat(w http.ResponseWriter, r *http.Request) {

}
|
|
|
|
|
|
2018-05-10 18:19:04 +08:00
|
|
|
|
func (this *Server) Index(w http.ResponseWriter, r *http.Request) {
|
2019-02-02 13:09:25 +08:00
|
|
|
|
var (
|
|
|
|
|
uploadUrl string
|
|
|
|
|
)
|
2019-02-11 16:02:34 +08:00
|
|
|
|
uploadUrl = "/upload"
|
2019-01-03 10:08:01 +08:00
|
|
|
|
if Config().EnableWebUpload {
|
2019-02-11 16:02:34 +08:00
|
|
|
|
if Config().SupportGroupManage {
|
|
|
|
|
uploadUrl = fmt.Sprintf("/%s/upload", Config().Group)
|
2019-02-02 13:09:25 +08:00
|
|
|
|
}
|
2019-01-03 10:08:01 +08:00
|
|
|
|
fmt.Fprintf(w,
|
2019-01-08 10:23:13 +08:00
|
|
|
|
fmt.Sprintf(`<html>
|
2017-09-09 16:40:55 +08:00
|
|
|
|
<head>
|
|
|
|
|
<meta charset="utf-8"></meta>
|
|
|
|
|
<title>Uploader</title>
|
2019-01-08 16:53:03 +08:00
|
|
|
|
<style>
|
|
|
|
|
form {
|
|
|
|
|
bargin
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
.form-line {
|
|
|
|
|
display:block;
|
|
|
|
|
}
|
|
|
|
|
</style>
|
2017-09-09 16:40:55 +08:00
|
|
|
|
</head>
|
|
|
|
|
<body>
|
2019-02-02 13:09:25 +08:00
|
|
|
|
<form action="%s" method="post" enctype="multipart/form-data">
|
2019-01-08 16:53:03 +08:00
|
|
|
|
<span class="form-line">文件(file):<input type="file" id="file" name="file" ></span>
|
|
|
|
|
<span class="form-line">场景(scene):<input type="text" id="scene" name="scene" value="%s"></span>
|
|
|
|
|
<span class="form-line">输出(output):<input type="text" id="output" name="output" value="json"></span>
|
|
|
|
|
<span class="form-line">自定义路径(path):<input type="text" id="path" name="path" value=""></span>
|
2017-09-09 16:40:55 +08:00
|
|
|
|
<input type="submit" name="submit" value="upload">
|
|
|
|
|
</form>
|
|
|
|
|
</body>
|
2019-02-02 13:09:25 +08:00
|
|
|
|
</html>`, uploadUrl, Config().DefaultScene))
|
2019-01-03 10:08:01 +08:00
|
|
|
|
} else {
|
|
|
|
|
w.Write([]byte("web upload deny"))
|
|
|
|
|
}
|
2017-09-09 16:40:55 +08:00
|
|
|
|
}
|
|
|
|
|
|
2018-05-10 18:19:04 +08:00
|
|
|
|
// init bootstraps the process before main runs: creates the working
// directories, parses flags, writes a default config file on first run,
// initializes the loggers, loads the config and initializes the server.
// NOTE(review): heavy side effects in init() make ordering implicit;
// consider moving this into an explicit setup function.
func init() {
	// Best-effort directory creation; errors (e.g. already-exists) are ignored.
	for _, folder := range FOLDERS {
		os.Mkdir(folder, 0775)
	}
	flag.Parse()
	// Random single-digit peer id, used unless the config provides one.
	peerId := fmt.Sprintf("%d", server.util.RandInt(0, 9))
	// First run: generate a default config pointing at this host's public IP.
	if !server.util.FileExists(CONST_CONF_FILE_NAME) {
		peer := "http://" + server.util.GetPulicIP() + ":8080"
		cfg := fmt.Sprintf(cfgJson, peerId, peer, peer)
		server.util.WriteFile(CONST_CONF_FILE_NAME, cfg)
	}
	// Main logger is mandatory: abort startup if its config is broken.
	if logger, err := log.LoggerFromConfigAsBytes([]byte(logConfigStr)); err != nil {
		panic(err)
	} else {
		log.ReplaceLogger(logger)
	}
	// Access logger is optional: log and continue without it on failure.
	if _logacc, err := log.LoggerFromConfigAsBytes([]byte(logAccessConfigStr)); err == nil {
		logacc = _logacc
		log.Info("succes init log access")
	} else {
		log.Error(err.Error())
	}
	ParseConfig(CONST_CONF_FILE_NAME)
	// Fill config gaps with defaults.
	if Config().QueueSize == 0 {
		Config().QueueSize = CONST_QUEUE_SIZE
	}
	if Config().PeerId == "" {
		Config().PeerId = peerId
	}
	// Static file serving for /<group>/... out of the store directory.
	staticHandler = http.StripPrefix("/"+Config().Group+"/", http.FileServer(http.Dir(STORE_DIR)))
	server.initComponent(false)
}
|
|
|
|
|
|
2019-02-12 17:22:47 +08:00
|
|
|
|
// test is ad-hoc debugging scaffolding for lockMap: it hammers a single
// lock key from many goroutines to observe IsLock/LockKey/UnLockKey
// behavior under contention. Not called by any production path.
func (this *Server) test() {
	// tt attempts to take the "xx" lock, prints, and releases via defer.
	tt := func(i int) {
		if server.lockMap.IsLock("xx") {
			return
		}
		server.lockMap.LockKey("xx")
		defer server.lockMap.UnLockKey("xx")
		//time.Sleep(time.Nanosecond*1)
		fmt.Println("xx", i)
	}
	// Burst of contending goroutines.
	for i := 0; i < 10000; i++ {
		go tt(i)
	}
	time.Sleep(time.Second * 3)
	// A few stragglers after the burst has drained.
	go tt(999999)
	go tt(999999)
	go tt(999999)
}
|
|
|
|
|
|
2019-02-01 11:31:19 +08:00
|
|
|
|
// initComponent wires up runtime state derived from the parsed config:
// determines this server's externally visible host URL, normalizes the
// peer list (drops self/loopback, ensures an http:// scheme) and — on
// first start only — loads the persisted stat counters into statMap.
//
// isReload is true when called from the /reload endpoint, in which case
// the stat restore is skipped (counters are already in memory).
func (this *Server) initComponent(isReload bool) {
	var (
		err   error
		ip    string
		stat  map[string]interface{}
		data  []byte
		count int64
	)
	ip = this.util.GetPulicIP()
	// Derive Host from Addr's port when not configured explicitly.
	if Config().Host == "" {
		if len(strings.Split(Config().Addr, ":")) == 2 {
			server.host = fmt.Sprintf("http://%s:%s", ip, strings.Split(Config().Addr, ":")[1])
			Config().Host = server.host
		}
	} else {
		server.host = Config().Host
	}
	// Matches dotted-quad IPs inside peer URLs.
	ex, _ := regexp.Compile("\\d+\\.\\d+\\.\\d+\\.\\d+")
	var peers []string
	for _, peer := range Config().Peers {
		// Drop peers that point back at this machine or at loopback.
		if this.util.Contains(ip, ex.FindAllString(peer, -1)) ||
			this.util.Contains("127.0.0.1", ex.FindAllString(peer, -1)) {
			continue
		}
		if strings.HasPrefix(peer, "http") {
			peers = append(peers, peer)
		} else {
			peers = append(peers, "http://"+peer)
		}
	}
	Config().Peers = peers
	// FormatStatInfo restores stat counters from the persisted JSON file,
	// coercing float64 (JSON numbers) back to int64 counters; when no stat
	// file exists it rebuilds the counters from disk via RepairStat.
	FormatStatInfo := func() {
		if this.util.FileExists(CONST_STAT_FILE_NAME) {
			if data, err = this.util.ReadBinFile(CONST_STAT_FILE_NAME); err != nil {
				log.Error(err)
			} else {
				if err = json.Unmarshal(data, &stat); err != nil {
					log.Error(err)
				} else {
					for k, v := range stat {
						switch v.(type) {
						case float64:
							// Truncate "123.000000" -> "123" before parsing.
							vv := strings.Split(fmt.Sprintf("%f", v), ".")[0]
							if count, err = strconv.ParseInt(vv, 10, 64); err != nil {
								log.Error(err)
							} else {
								this.statMap.Put(k, count)
							}
						default:
							this.statMap.Put(k, v)
						}
					}
				}
			}
		} else {
			this.RepairStat()
		}
	}
	if !isReload {
		FormatStatInfo()
	}
	//Timer
}
|
|
|
|
|
|
2018-12-30 18:18:42 +08:00
|
|
|
|
// HttpHandler is the root handler passed to http.ListenAndServe. It has
// no state; it exists so ServeHTTP can wrap http.DefaultServeMux with
// access logging and panic recovery.
type HttpHandler struct {
}
|
2018-05-10 18:19:04 +08:00
|
|
|
|
|
2018-12-30 18:18:42 +08:00
|
|
|
|
func (HttpHandler) ServeHTTP(res http.ResponseWriter, req *http.Request) {
|
|
|
|
|
status_code := "200"
|
|
|
|
|
defer func(t time.Time) {
|
|
|
|
|
logStr := fmt.Sprintf("[Access] %s | %v | %s | %s | %s | %s |%s",
|
|
|
|
|
time.Now().Format("2006/01/02 - 15:04:05"),
|
|
|
|
|
res.Header(),
|
|
|
|
|
time.Since(t).String(),
|
2019-01-18 10:32:25 +08:00
|
|
|
|
server.util.GetClientIp(req),
|
2018-12-30 18:18:42 +08:00
|
|
|
|
req.Method,
|
|
|
|
|
status_code,
|
|
|
|
|
req.RequestURI,
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
logacc.Info(logStr)
|
|
|
|
|
}(time.Now())
|
|
|
|
|
|
|
|
|
|
defer func() {
|
|
|
|
|
if err := recover(); err != nil {
|
|
|
|
|
status_code = "500"
|
|
|
|
|
res.WriteHeader(500)
|
|
|
|
|
print(err)
|
|
|
|
|
buff := debug.Stack()
|
|
|
|
|
log.Error(err)
|
|
|
|
|
log.Error(string(buff))
|
2018-05-10 13:31:34 +08:00
|
|
|
|
|
2018-12-30 18:18:42 +08:00
|
|
|
|
}
|
|
|
|
|
}()
|
|
|
|
|
|
|
|
|
|
http.DefaultServeMux.ServeHTTP(res, req)
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-16 10:28:51 +08:00
|
|
|
|
// Main starts all background workers, registers the HTTP routes (with or
// without the group prefix) and blocks serving HTTP on Config().Addr.
func (this *Server) Main() {
	// Periodically retry files that previously failed to replicate.
	go func() {
		for {
			this.CheckFileAndSendToPeer(this.util.GetToDay(), CONST_Md5_ERROR_FILE_NAME, false)
			//fmt.Println("CheckFileAndSendToPeer")
			time.Sleep(time.Second * time.Duration(Config().RefreshInterval))
			//this.util.RemoveEmptyDir(STORE_DIR)
		}
	}()
	go this.CleanMd5SumCache()
	go this.Check()
	go this.Consumer()
	go this.ConsumerDownLoad()
	// Optional periodic cluster repair: first pass after 3 minutes, then
	// roughly hourly.
	if Config().AutoRepair {
		go func() {
			for {
				time.Sleep(time.Minute * 3)
				this.AutoRepair(false)
				time.Sleep(time.Minute * 60)
			}
		}()
	}
	// Route table; the two branches must stay path-for-path identical apart
	// from the /<group> prefix.
	if Config().SupportGroupManage {
		http.HandleFunc(fmt.Sprintf("/%s", Config().Group), this.Index)
		http.HandleFunc(fmt.Sprintf("/%s/check_file_exist", Config().Group), this.CheckFileExist)
		http.HandleFunc(fmt.Sprintf("/%s/upload", Config().Group), this.Upload)
		http.HandleFunc(fmt.Sprintf("/%s/delete", Config().Group), this.RemoveFile)
		http.HandleFunc(fmt.Sprintf("/%s/sync", Config().Group), this.Sync)
		http.HandleFunc(fmt.Sprintf("/%s/stat", Config().Group), this.Stat)
		http.HandleFunc(fmt.Sprintf("/%s/repair_stat", Config().Group), this.RepairStatWeb)
		http.HandleFunc(fmt.Sprintf("/%s/status", Config().Group), this.Status)
		http.HandleFunc(fmt.Sprintf("/%s/repair", Config().Group), this.Repair)
		http.HandleFunc(fmt.Sprintf("/%s/repair_fileinfo", Config().Group), this.RepairFileInfo)
		http.HandleFunc(fmt.Sprintf("/%s/reload", Config().Group), this.Reload)
		http.HandleFunc(fmt.Sprintf("/%s/syncfile", Config().Group), this.SyncFile)
		http.HandleFunc(fmt.Sprintf("/%s/syncfile_info", Config().Group), this.SyncFileInfo)
		http.HandleFunc(fmt.Sprintf("/%s/get_md5s_by_date", Config().Group), this.GetMd5sForWeb)
		http.HandleFunc(fmt.Sprintf("/%s/receive_md5s", Config().Group), this.ReceiveMd5s)
	} else {
		http.HandleFunc("/", this.Index)
		http.HandleFunc("/check_file_exist", this.CheckFileExist)
		http.HandleFunc("/upload", this.Upload)
		http.HandleFunc("/delete", this.RemoveFile)
		http.HandleFunc("/sync", this.Sync)
		http.HandleFunc("/stat", this.Stat)
		http.HandleFunc("/repair_stat", this.RepairStatWeb)
		http.HandleFunc("/status", this.Status)
		http.HandleFunc("/repair", this.Repair)
		http.HandleFunc("/repair_fileinfo", this.RepairFileInfo)
		http.HandleFunc("/reload", this.Reload)
		http.HandleFunc("/syncfile", this.SyncFile)
		http.HandleFunc("/syncfile_info", this.SyncFileInfo)
		http.HandleFunc("/get_md5s_by_date", this.GetMd5sForWeb)
		http.HandleFunc("/receive_md5s", this.ReceiveMd5s)
	}
	// File downloads always live under the group prefix.
	http.HandleFunc("/"+Config().Group+"/", this.Download)
	fmt.Println("Listen on " + Config().Addr)
	// Blocks until the listener fails; the error is both logged and printed.
	err := http.ListenAndServe(Config().Addr, new(HttpHandler))
	log.Error(err)
	fmt.Println(err)
}
|
|
|
|
|
|
|
|
|
|
// main delegates to the package-level server instance, which init()
// fully configured before this runs. Main blocks serving HTTP.
func main() {
	server.Main()
}
|