go-fastdfs/fileserver.go

3459 lines
92 KiB
Go
Raw Normal View History

2017-09-09 16:40:55 +08:00
package main
import (
2019-02-23 10:07:00 +08:00
"bytes"
2018-05-10 13:31:34 +08:00
"crypto/md5"
"crypto/rand"
2019-01-23 21:39:13 +08:00
"crypto/sha1"
2018-05-10 13:31:34 +08:00
"encoding/base64"
2018-12-30 17:17:40 +08:00
"errors"
2018-05-10 18:19:04 +08:00
"flag"
2017-09-09 16:40:55 +08:00
"fmt"
2019-02-18 17:32:38 +08:00
"github.com/astaxie/beego/httplib"
"github.com/deckarep/golang-set"
2019-02-23 10:07:00 +08:00
_ "github.com/eventials/go-tus"
2019-02-18 17:32:38 +08:00
"github.com/json-iterator/go"
log "github.com/sjqzhang/seelog"
2019-02-20 11:57:32 +08:00
"github.com/sjqzhang/tusd"
"github.com/sjqzhang/tusd/filestore"
2019-02-20 20:14:21 +08:00
"github.com/syndtr/goleveldb/leveldb"
2019-02-23 23:01:40 +08:00
"github.com/syndtr/goleveldb/leveldb/util"
2017-09-09 16:40:55 +08:00
"io"
2018-12-30 17:17:40 +08:00
"io/ioutil"
2019-02-18 17:32:38 +08:00
slog "log"
random "math/rand"
2018-12-30 17:17:40 +08:00
"mime/multipart"
"net"
2018-05-10 13:31:34 +08:00
"net/http"
2019-01-17 01:05:27 +08:00
_ "net/http/pprof"
2019-01-09 12:05:20 +08:00
"net/smtp"
2019-01-02 20:37:50 +08:00
"net/url"
2019-01-22 09:32:37 +08:00
"os"
2019-01-09 12:05:20 +08:00
"os/signal"
2019-01-01 23:31:14 +08:00
"path"
2018-12-31 17:46:24 +08:00
"path/filepath"
2018-12-30 17:17:40 +08:00
"reflect"
2019-01-22 09:32:37 +08:00
"regexp"
2019-02-18 17:32:38 +08:00
"runtime"
2018-12-30 18:18:42 +08:00
"runtime/debug"
2019-01-02 17:46:30 +08:00
"strconv"
2018-05-10 13:31:34 +08:00
"strings"
2019-01-22 09:32:37 +08:00
"sync"
2018-12-30 17:17:40 +08:00
"sync/atomic"
2019-01-22 09:32:37 +08:00
"syscall"
2018-05-10 13:31:34 +08:00
"time"
2018-12-30 17:17:40 +08:00
"unsafe"
2018-05-10 13:31:34 +08:00
)
2018-05-10 18:19:04 +08:00
// staticHandler serves files under STATIC_DIR.
var staticHandler http.Handler

// json replaces encoding/json with jsoniter configured for
// standard-library compatibility; used for all (un)marshalling.
var json = jsoniter.ConfigCompatibleWithStandardLibrary

// server is the process-wide Server singleton.
var server *Server

// logacc is the access logger; presumably initialized from
// logAccessConfigStr at startup — confirm in main.
var logacc log.LoggerInterface

// FOLDERS lists the directories created at startup.
var FOLDERS = []string{DATA_DIR, STORE_DIR, CONF_DIR, STATIC_DIR}

// CONST_QUEUE_SIZE is the capacity of the replication and file-log channels.
var CONST_QUEUE_SIZE = 10000

var (
	// FileName is the path of the loaded config file (set by ParseConfig).
	FileName string
	// ptr holds the current *GloablConfig; read via Config() and
	// swapped atomically by ParseConfig.
	ptr unsafe.Pointer
	// DOCKER_DIR is an optional root prefix for all data paths.
	DOCKER_DIR = ""
	STORE_DIR  = STORE_DIR_NAME
	CONF_DIR   = CONF_DIR_NAME
	LOG_DIR    = LOG_DIR_NAME
	DATA_DIR   = DATA_DIR_NAME
	STATIC_DIR = STATIC_DIR_NAME
	// LARGE_DIR holds the merged small-file ("haystack") volumes.
	LARGE_DIR_NAME = "haystack"
	LARGE_DIR      = STORE_DIR + "/haystack"
	// leveldb database locations.
	CONST_LEVELDB_FILE_NAME     = DATA_DIR + "/fileserver.db"
	CONST_LOG_LEVELDB_FILE_NAME = DATA_DIR + "/log.db"
	CONST_STAT_FILE_NAME        = DATA_DIR + "/stat.json"
	CONST_CONF_FILE_NAME        = CONF_DIR + "/cfg.json"
	// logConfigStr is the seelog template for the main log;
	// {DOCKER_DIR} is substituted at startup.
	logConfigStr = `
<seelog type="asynctimer" asyncinterval="1000" minlevel="trace" maxlevel="error">
<outputs formatid="common">
<buffered formatid="common" size="1048576" flushperiod="1000">
<rollingfile type="size" filename="{DOCKER_DIR}log/fileserver.log" maxsize="104857600" maxrolls="10"/>
</buffered>
</outputs>
<formats>
<format id="common" format="%Date %Time [%LEV] [%File:%Line] [%Func] %Msg%n" />
</formats>
</seelog>
`
	// logAccessConfigStr is the seelog template for the access log.
	logAccessConfigStr = `
<seelog type="asynctimer" asyncinterval="1000" minlevel="trace" maxlevel="error">
<outputs formatid="common">
<buffered formatid="common" size="1048576" flushperiod="1000">
<rollingfile type="size" filename="{DOCKER_DIR}log/access.log" maxsize="104857600" maxrolls="10"/>
</buffered>
</outputs>
<formats>
<format id="common" format="%Date %Time [%LEV] [%File:%Line] [%Func] %Msg%n" />
</formats>
</seelog>
`
)
const (
	// directory names (relative to DOCKER_DIR).
	STORE_DIR_NAME  = "files"
	LOG_DIR_NAME    = "log"
	DATA_DIR_NAME   = "data"
	CONF_DIR_NAME   = "conf"
	STATIC_DIR_NAME = "static"
	// statMap keys and the big-upload URL suffix.
	CONST_STAT_FILE_COUNT_KEY      = "fileCount"
	CONST_BIG_UPLOAD_PATH_SUFFIX   = "/big/upload/"
	CONST_STAT_FILE_TOTAL_SIZE_KEY = "totalSize"
	// per-day metadata log file names under DATA_DIR/<date>/.
	CONST_Md5_ERROR_FILE_NAME  = "errors.md5"
	CONST_Md5_QUEUE_FILE_NAME  = "queue.md5"
	CONST_FILE_Md5_FILE_NAME   = "files.md5"
	CONST_REMOME_Md5_FILE_NAME = "removes.md5"
	// files up to this size (1 MiB) qualify as "small" for merging.
	CONST_SMALL_FILE_SIZE    = 1024 * 1024
	CONST_MESSAGE_CLUSTER_IP = "Can only be called by the cluster ip,current ip:%s"
	// cfgJson is the built-in default configuration template; the
	// Chinese keys document each neighbouring setting. The %s
	// placeholders are filled with peer_id, host and peers.
	cfgJson = `{
"绑定端号": "端口",
"addr": ":8080",
"PeerID": "集群内唯一,请使用0-9的单字符默认自动生成",
"peer_id": "%s",
"本主机地址": "本机http地址,默认自动生成,必段为内网,自动生成不为内网请自行修改,下同",
"host": "%s",
"集群": "集群列表,注意为了高可用IP必须不能是同一个,同一不会自动备份且不能为127.0.0.1,且必须为内网IP默认自动生成",
"peers": ["%s"],
"组号": "用于区别不同的集群(上传或下载)与support_group_upload配合使用,带在下载路径中",
"group": "group1",
"是否合并小文件": "默认不合并,合并可以解决inode不够用的情况当前对于小于1M文件进行合并",
"enable_merge_small_file": false,
"重试同步失败文件的时间": "单位秒",
"refresh_interval": 1800,
"是否自动重命名": "默认不自动重命名,使用原文件名",
"rename_file": false,
"是否支持web上传,方便调试": "默认支持web上传",
"enable_web_upload": true,
"是否支持非日期路径": "默认支持非日期路径,也即支持自定义路径,需要上传文件时指定path",
"enable_custom_path": true,
"下载域名": "用于外网下载文件的域名,不包含http://",
"download_domain": "",
"场景列表": "当设定后,用户指的场景必项在列表中,默认不做限制",
"scenes": [],
"默认场景": "默认default",
"default_scene": "default",
"是否显示目录": "默认显示,方便调试用,上线时请关闭",
"show_dir": true,
"邮件配置": "",
"mail": {
"user": "abc@163.com",
"password": "abc",
"host": "smtp.163.com:25"
},
"告警接收邮件列表": "接收人数组",
"alram_receivers": [],
"告警接收URL": "方法post,参数:subjet,message",
"alarm_url": "",
"下载是否需带token": "真假",
"download_use_token": false,
"下载token过期时间": "单位秒",
"download_token_expire": 600,
"是否自动修复": "在超过1亿文件时出现性能问题取消此选项请手动按天同步请查看FAQ",
"auto_repair": true,
"文件去重算法md5可能存在冲突默认md5": "sha1|md5",
"file_sum_arithmetic": "md5",
"是否支持按组(集群)管理,主要用途是Nginx支持多集群": "默认不支持,不支持时路径为http://10.1.5.4:8080/action,支持时为http://10.1.5.4:8080/group(配置中的group参数)/action,action为动作名如status,delete,sync等",
"support_group_manage": false,
"管理ip列表": "用于管理集的ip白名单,",
"admin_ips": ["127.0.0.1"],
"是否启用迁移": "默认不启用",
"enable_migrate": false,
"文件是否去重": "默认去重",
"enable_distinct_file": true
}
`
)
2018-05-10 13:31:34 +08:00
// Common bundles stateless helper methods (hashing, I/O, net, etc.).
type Common struct {
}

// Server is the application core: metadata stores, stat counters,
// replication queues and per-key locks.
type Server struct {
	ldb            *leveldb.DB // file metadata store
	logDB          *leveldb.DB // per-day file-event log store
	util           *Common
	statMap        *CommonMap
	sumMap         *CommonMap //map[string]mapset.Set
	queueToPeers   chan FileInfo // files waiting to be pushed to peers
	queueFromPeers chan FileInfo // files waiting to be pulled from peers
	queueFileLog   chan *FileLog // md5-log entries waiting to be persisted
	lockMap        *CommonMap
	curDate        string
	host           string
}

// FileInfo is the metadata record persisted for each stored file.
type FileInfo struct {
	Name      string   `json:"name"`
	ReName    string   `json:"rename"`
	Path      string   `json:"path"`
	Md5       string   `json:"md5"`
	Size      int64    `json:"size"`
	Peers     []string `json:"peers"`
	Scene     string   `json:"scene"`
	TimeStamp int64    `json:"timeStamp"`
	// OffSet is the position inside a merged small-file volume;
	// -1 marks a stand-alone file.
	OffSet int64 `json:"offset"`
}

// FileLog pairs a FileInfo with the md5-log file name it belongs to.
type FileLog struct {
	FileInfo *FileInfo
	FileName string
}

// JsonResult is the generic JSON envelope returned by the HTTP API.
type JsonResult struct {
	Message string      `json:"message"`
	Status  string      `json:"status"`
	Data    interface{} `json:"data"`
}

// FileResult is the upload response payload.
type FileResult struct {
	Url    string `json:"url"`
	Md5    string `json:"md5"`
	Path   string `json:"path"`
	Domain string `json:"domain"`
	Scene  string `json:"scene"`
	//Just for Compatibility
	Scenes  string `json:"scenes"`
	Retmsg  string `json:"retmsg"`
	Retcode int    `json:"retcode"`
	Src     string `json:"src"`
}

// Mail holds SMTP credentials for alarm mail.
type Mail struct {
	User     string `json:"user"`
	Password string `json:"password"`
	Host     string `json:"host"`
}

// StatDateFileInfo is a per-day (or overall) file count/size statistic.
type StatDateFileInfo struct {
	Date      string `json:"date"`
	TotalSize int64  `json:"totalSize"`
	FileCount int64  `json:"fileCount"`
}

// GloablConfig (sic) mirrors the cfgJson template; see that template
// for the meaning of each setting.
type GloablConfig struct {
	Addr                 string   `json:"addr"`
	Peers                []string `json:"peers"`
	Group                string   `json:"group"`
	RenameFile           bool     `json:"rename_file"`
	ShowDir              bool     `json:"show_dir"`
	RefreshInterval      int      `json:"refresh_interval"`
	EnableWebUpload      bool     `json:"enable_web_upload"`
	DownloadDomain       string   `json:"download_domain"`
	EnableCustomPath     bool     `json:"enable_custom_path"`
	Scenes               []string `json:"scenes"`
	AlramReceivers       []string `json:"alram_receivers"`
	DefaultScene         string   `json:"default_scene"`
	Mail                 Mail     `json:"mail"`
	AlarmUrl             string   `json:"alarm_url"`
	DownloadUseToken     bool     `json:"download_use_token"`
	DownloadTokenExpire  int      `json:"download_token_expire"`
	QueueSize            int      `json:"queue_size"`
	AutoRepair           bool     `json:"auto_repair"`
	Host                 string   `json:"host"`
	FileSumArithmetic    string   `json:"file_sum_arithmetic"`
	PeerId               string   `json:"peer_id"`
	SupportGroupManage   bool     `json:"support_group_manage"`
	AdminIps             []string `json:"admin_ips"`
	EnableMergeSmallFile bool     `json:"enable_merge_small_file"`
	EnableMigrate        bool     `json:"enable_migrate"`
	EnableDistinctFile   bool     `json:"enable_distinct_file"`
}
func NewServer() *Server {
2019-01-18 10:32:25 +08:00
var (
server *Server
2019-01-22 19:26:05 +08:00
err error
2019-01-18 10:32:25 +08:00
)
server = &Server{
2019-01-21 15:07:42 +08:00
util: &Common{},
2019-02-13 10:09:41 +08:00
statMap: NewCommonMap(0),
lockMap: NewCommonMap(0),
2019-01-21 15:07:42 +08:00
queueToPeers: make(chan FileInfo, CONST_QUEUE_SIZE),
queueFromPeers: make(chan FileInfo, CONST_QUEUE_SIZE),
2019-02-25 16:24:28 +08:00
queueFileLog: make(chan *FileLog, CONST_QUEUE_SIZE),
2019-02-25 10:38:32 +08:00
sumMap: NewCommonMap(365 * 3),
2019-01-18 10:32:25 +08:00
}
2019-02-25 16:24:28 +08:00
defaultTransport := &http.Transport{
DisableKeepAlives: true,
Dial: httplib.TimeoutDialer(time.Second*6, time.Second*60),
MaxIdleConns: 100,
MaxIdleConnsPerHost: 100,
}
settins := httplib.BeegoHTTPSettings{
2019-02-25 16:24:28 +08:00
UserAgent: "Go-FastDFS",
2019-01-18 22:49:09 +08:00
ConnectTimeout: 10 * time.Second,
ReadWriteTimeout: 10 * time.Second,
Gzip: true,
DumpBody: true,
2019-02-25 16:24:28 +08:00
Transport: defaultTransport,
2019-01-18 22:49:09 +08:00
}
httplib.SetDefaultSetting(settins)
2019-01-19 20:44:54 +08:00
server.statMap.Put(CONST_STAT_FILE_COUNT_KEY, int64(0))
2019-01-20 11:05:22 +08:00
server.statMap.Put(CONST_STAT_FILE_TOTAL_SIZE_KEY, int64(0))
server.statMap.Put(server.util.GetToDay()+"_"+CONST_STAT_FILE_COUNT_KEY, int64(0))
server.statMap.Put(server.util.GetToDay()+"_"+CONST_STAT_FILE_TOTAL_SIZE_KEY, int64(0))
server.curDate = server.util.GetToDay()
2019-02-24 14:09:06 +08:00
server.ldb, err = leveldb.OpenFile(CONST_LEVELDB_FILE_NAME, nil)
if err != nil {
fmt.Println(err)
panic(err)
log.Error(err)
}
server.logDB, err = leveldb.OpenFile(CONST_LOG_LEVELDB_FILE_NAME, nil)
2019-01-22 09:32:37 +08:00
if err != nil {
2019-01-22 19:26:05 +08:00
fmt.Println(err)
2019-01-22 09:32:37 +08:00
panic(err)
2019-01-22 19:26:05 +08:00
log.Error(err)
2019-01-22 09:32:37 +08:00
}
2019-01-18 10:32:25 +08:00
return server
}
2019-01-02 17:46:30 +08:00
// CommonMap is a mutex-guarded map used for stats, per-key locks and
// generic shared state. All methods are safe for concurrent use.
type CommonMap struct {
	sync.Mutex
	m map[string]interface{}
}

// NewCommonMap returns an empty CommonMap; size > 0 pre-sizes the map.
func NewCommonMap(size int) *CommonMap {
	if size > 0 {
		return &CommonMap{m: make(map[string]interface{}, size)}
	}
	return &CommonMap{m: make(map[string]interface{})}
}

// GetValue returns the value stored under k and whether it exists.
func (s *CommonMap) GetValue(k string) (interface{}, bool) {
	s.Lock()
	defer s.Unlock()
	v, ok := s.m[k]
	return v, ok
}

// Put stores v under k, replacing any previous value.
func (s *CommonMap) Put(k string, v interface{}) {
	s.Lock()
	defer s.Unlock()
	s.m[k] = v
}

// LockKey acquires a dedicated mutex for key k, creating it on first
// use. A companion "<k>_lock_" flag records the locked state for
// IsLock. The map's own mutex is released before blocking on the
// per-key mutex so other keys are not held up.
// NOTE(review): the flag is set before the per-key mutex is actually
// acquired, so IsLock may briefly report true while LockKey is still
// waiting — confirm callers tolerate this.
func (s *CommonMap) LockKey(k string) {
	s.Lock()
	if v, ok := s.m[k]; ok {
		s.m[k+"_lock_"] = true
		s.Unlock()
		v.(*sync.Mutex).Lock()
	} else {
		s.m[k] = &sync.Mutex{}
		v = s.m[k]
		s.m[k+"_lock_"] = true
		s.Unlock()
		v.(*sync.Mutex).Lock()
	}
}

// UnLockKey releases the per-key mutex for k (if one exists) and
// clears the "<k>_lock_" flag.
func (s *CommonMap) UnLockKey(k string) {
	s.Lock()
	if v, ok := s.m[k]; ok {
		v.(*sync.Mutex).Unlock()
		s.m[k+"_lock_"] = false
	}
	s.Unlock()
}

// IsLock reports whether key k is currently flagged as locked by
// LockKey. It inspects only the flag, not the mutex itself.
func (s *CommonMap) IsLock(k string) bool {
	s.Lock()
	if v, ok := s.m[k+"_lock_"]; ok {
		s.Unlock()
		return v.(bool)
	}
	s.Unlock()
	return false
}

// Keys returns a snapshot of all keys (including any "<k>_lock_"
// bookkeeping entries created by LockKey).
func (s *CommonMap) Keys() []string {
	s.Lock()
	defer s.Unlock()
	// was make([]string, len(s.m)): appending after a length-sized
	// make produced len(s.m) leading "" entries in the result.
	keys := make([]string, 0, len(s.m))
	for k := range s.m {
		keys = append(keys, k)
	}
	return keys
}

// Clear discards all entries.
func (s *CommonMap) Clear() {
	s.Lock()
	defer s.Unlock()
	s.m = make(map[string]interface{})
}

// Remove deletes key if present.
func (s *CommonMap) Remove(key string) {
	s.Lock()
	defer s.Unlock()
	if _, ok := s.m[key]; ok {
		delete(s.m, key)
	}
}

// AddUniq inserts key with a nil value if it is not already present.
func (s *CommonMap) AddUniq(key string) {
	s.Lock()
	defer s.Unlock()
	if _, ok := s.m[key]; !ok {
		s.m[key] = nil
	}
}

// AddCount adds count to the int stored under key, initializing a
// missing key to count.
func (s *CommonMap) AddCount(key string, count int) {
	s.Lock()
	defer s.Unlock()
	if _v, ok := s.m[key]; ok {
		v := _v.(int)
		v = v + count
		s.m[key] = v
	} else {
		// was s.m[key] = 1, which ignored count for new keys and was
		// inconsistent with AddCountInt64.
		s.m[key] = count
	}
}

// AddCountInt64 adds count to the int64 stored under key,
// initializing a missing key to count.
func (s *CommonMap) AddCountInt64(key string, count int64) {
	s.Lock()
	defer s.Unlock()
	if _v, ok := s.m[key]; ok {
		v := _v.(int64)
		v = v + count
		s.m[key] = v
	} else {
		s.m[key] = count
	}
}

// Add increments the int stored under key, initializing a missing
// key to 1.
func (s *CommonMap) Add(key string) {
	s.Lock()
	defer s.Unlock()
	if _v, ok := s.m[key]; ok {
		v := _v.(int)
		v = v + 1
		s.m[key] = v
	} else {
		s.m[key] = 1
	}
}

// Zero resets every value to the untyped int 0, keeping all keys.
func (s *CommonMap) Zero() {
	s.Lock()
	defer s.Unlock()
	for k := range s.m {
		s.m[k] = 0
	}
}

// Contains reports whether every argument (as a string key) exists.
// With no arguments it is vacuously true.
func (s *CommonMap) Contains(i ...interface{}) bool {
	s.Lock()
	defer s.Unlock()
	for _, val := range i {
		if _, ok := s.m[val.(string)]; !ok {
			return false
		}
	}
	return true
}

// Get returns a shallow copy of the underlying map.
func (s *CommonMap) Get() map[string]interface{} {
	s.Lock()
	defer s.Unlock()
	m := make(map[string]interface{})
	for k, v := range s.m {
		m[k] = v
	}
	return m
}
2019-01-01 14:41:57 +08:00
// Config returns the current global configuration snapshot. The
// pointer is loaded atomically, so concurrent readers are safe;
// ParseConfig publishes new snapshots with an atomic store.
func Config() *GloablConfig {
	return (*GloablConfig)(atomic.LoadPointer(&ptr))
}

// ParseConfig loads the global configuration from filePath and
// publishes it via an atomic pointer swap. An empty filePath falls
// back to the built-in cfgJson template. Any read or parse failure
// panics, since the server cannot run without a valid configuration.
func ParseConfig(filePath string) {
	var (
		data []byte
	)
	if filePath == "" {
		data = []byte(strings.TrimSpace(cfgJson))
	} else {
		file, err := os.Open(filePath)
		if err != nil {
			panic(fmt.Sprintln("open file path:", filePath, "error:", err))
		}
		defer file.Close()
		// remember which file the config came from (global).
		FileName = filePath
		data, err = ioutil.ReadAll(file)
		if err != nil {
			panic(fmt.Sprintln("file path:", filePath, " read all error:", err))
		}
	}
	var c GloablConfig
	if err := json.Unmarshal(data, &c); err != nil {
		panic(fmt.Sprintln("file path:", filePath, "json unmarshal error:", err))
	}
	log.Info(c)
	atomic.StorePointer(&ptr, unsafe.Pointer(&c))
	log.Info("config parse success")
}
2018-05-10 13:31:34 +08:00
// GetUUID returns a UUID-shaped random identifier: 48 random bytes
// are base64-encoded, md5-hashed, and the hex digest is split into
// 8-4-4-4-12 groups. Returns "" if the system RNG fails.
func (this *Common) GetUUID() string {
	b := make([]byte, 48)
	if _, err := io.ReadFull(rand.Reader, b); err != nil {
		return ""
	}
	id := this.MD5(base64.URLEncoding.EncodeToString(b))
	return fmt.Sprintf("%s-%s-%s-%s-%s", id[0:8], id[8:12], id[12:16], id[16:20], id[20:])
}

// CopyFile copies the regular file src to dst (dst is created or
// truncated) and returns the number of bytes copied.
func (this *Common) CopyFile(src, dst string) (int64, error) {
	sourceFileStat, err := os.Stat(src)
	if err != nil {
		return 0, err
	}
	if !sourceFileStat.Mode().IsRegular() {
		return 0, fmt.Errorf("%s is not a regular file", src)
	}
	source, err := os.Open(src)
	if err != nil {
		return 0, err
	}
	defer source.Close()
	destination, err := os.Create(dst)
	if err != nil {
		return 0, err
	}
	defer destination.Close()
	nBytes, err := io.Copy(destination, source)
	return nBytes, err
}

// RandInt returns a random int in [min, max); if min >= max it
// returns max. A fresh time-seeded source is created on every call.
func (this *Common) RandInt(min, max int) int {
	return func(min, max int) int {
		r := random.New(random.NewSource(time.Now().UnixNano()))
		if min >= max {
			return max
		}
		return r.Intn(max-min) + min
	}(min, max)
}
2019-01-09 12:05:20 +08:00
// GetToDay returns today's local date formatted as "YYYYMMDD".
func (this *Common) GetToDay() string {
	return time.Now().Format("20060102")
}

// UrlEncode URL-encodes v: a string is encoded as a bare value, a
// map[string]string as a full query string; any other type is
// rendered with %v, unencoded.
func (this *Common) UrlEncode(v interface{}) string {
	switch v.(type) {
	case string:
		// encode via a throwaway "name=" pair, then strip the key.
		m := make(map[string]string)
		m["name"] = v.(string)
		return strings.Replace(this.UrlEncodeFromMap(m), "name=", "", 1)
	case map[string]string:
		return this.UrlEncodeFromMap(v.(map[string]string))
	default:
		return fmt.Sprintf("%v", v)
	}
}

// UrlEncodeFromMap renders m as a URL-encoded query string
// (keys sorted by url.Values.Encode).
func (this *Common) UrlEncodeFromMap(m map[string]string) string {
	vv := url.Values{}
	for k, v := range m {
		vv.Add(k, v)
	}
	return vv.Encode()
}

// UrlDecodeToMap parses a query string into a map, keeping only the
// first value of each key.
func (this *Common) UrlDecodeToMap(body string) (map[string]string, error) {
	var (
		err error
		m   map[string]string
		v   url.Values
	)
	m = make(map[string]string)
	if v, err = url.ParseQuery(body); err != nil {
		return m, err
	}
	for _k, _v := range v {
		if len(_v) > 0 {
			m[_k] = _v[0]
		}
	}
	return m, nil
}

// GetDayFromTimeStamp formats a unix timestamp as "YYYYMMDD"
// (local time).
func (this *Common) GetDayFromTimeStamp(timeStamp int64) string {
	return time.Unix(timeStamp, 0).Format("20060102")
}

// StrToMapSet splits str on sep and returns the parts as a set.
func (this *Common) StrToMapSet(str string, sep string) mapset.Set {
	result := mapset.NewSet()
	for _, v := range strings.Split(str, sep) {
		result.Add(v)
	}
	return result
}

// MapSetToStr joins the members of set with sep; iteration order,
// and hence the output order, is undefined.
func (this *Common) MapSetToStr(set mapset.Set, sep string) string {
	var (
		ret []string
	)
	for v := range set.Iter() {
		ret = append(ret, v.(string))
	}
	return strings.Join(ret, sep)
}
2018-12-30 17:17:40 +08:00
// GetPulicIP (sic) discovers the preferred outbound IP by "dialing"
// a public resolver over UDP (no packet is actually sent) and reading
// the local address the kernel selected. Falls back to 127.0.0.1.
func (this *Common) GetPulicIP() string {
	var (
		err  error
		conn net.Conn
	)
	if conn, err = net.Dial("udp", "8.8.8.8:80"); err != nil {
		return "127.0.0.1"
	}
	defer conn.Close()
	localAddr := conn.LocalAddr().String()
	idx := strings.LastIndex(localAddr, ":")
	return localAddr[0:idx]
}

// MD5 returns the lowercase hex md5 digest of str.
func (this *Common) MD5(str string) string {
	md := md5.New()
	md.Write([]byte(str))
	return fmt.Sprintf("%x", md.Sum(nil))
}

// GetFileMd5 rewinds file and returns the hex md5 of its entire
// contents. Read errors are ignored (io.Copy result unchecked).
func (this *Common) GetFileMd5(file *os.File) string {
	file.Seek(0, 0)
	md5h := md5.New()
	io.Copy(md5h, file)
	sum := fmt.Sprintf("%x", md5h.Sum(nil))
	return sum
}

// GetFileSum hashes file with alg: "sha1" selects sha1, anything
// else falls back to md5.
func (this *Common) GetFileSum(file *os.File, alg string) string {
	alg = strings.ToLower(alg)
	if alg == "sha1" {
		return this.GetFileSha1Sum(file)
	} else {
		return this.GetFileMd5(file)
	}
}

// GetFileSumByName opens filepath and hashes its contents with alg
// ("sha1" or md5 by default).
func (this *Common) GetFileSumByName(filepath string, alg string) (string, error) {
	var (
		err  error
		file *os.File
	)
	file, err = os.Open(filepath)
	if err != nil {
		return "", err
	}
	defer file.Close()
	alg = strings.ToLower(alg)
	if alg == "sha1" {
		return this.GetFileSha1Sum(file), nil
	} else {
		return this.GetFileMd5(file), nil
	}
}

// GetFileSha1Sum rewinds file and returns the hex sha1 digest of its
// entire contents.
func (this *Common) GetFileSha1Sum(file *os.File) string {
	file.Seek(0, 0)
	md5h := sha1.New()
	io.Copy(md5h, file)
	sum := fmt.Sprintf("%x", md5h.Sum(nil))
	return sum
}
2019-02-14 19:02:51 +08:00
// WriteFileByOffSet writes data into filepath starting at offset,
// creating the file if necessary. A short write is reported as an
// error.
func (this *Common) WriteFileByOffSet(filepath string, offset int64, data []byte) error {
	file, err := os.OpenFile(filepath, os.O_CREATE|os.O_RDWR, 0666)
	if err != nil {
		return err
	}
	defer file.Close()
	count, err := file.WriteAt(data, offset)
	if err != nil {
		return err
	}
	if count != len(data) {
		// was errors.New(fmt.Sprintf(...)): fmt.Errorf is the idiom.
		return fmt.Errorf("write %s error", filepath)
	}
	return nil
}

// ReadFileByOffSet reads exactly length bytes from filepath starting
// at offset; a short read is reported as an error.
func (this *Common) ReadFileByOffSet(filepath string, offset int64, length int) ([]byte, error) {
	file, err := os.Open(filepath)
	if err != nil {
		return nil, err
	}
	defer file.Close()
	result := make([]byte, length)
	count, err := file.ReadAt(result, offset)
	if err != nil {
		return nil, err
	}
	if count != length {
		return nil, errors.New("read error")
	}
	return result, nil
}
2018-12-30 17:17:40 +08:00
func (this *Common) Contains(obj interface{}, arrayobj interface{}) bool {
targetValue := reflect.ValueOf(arrayobj)
switch reflect.TypeOf(arrayobj).Kind() {
case reflect.Slice, reflect.Array:
for i := 0; i < targetValue.Len(); i++ {
if targetValue.Index(i).Interface() == obj {
return true
}
}
case reflect.Map:
if targetValue.MapIndex(reflect.ValueOf(obj)).IsValid() {
return true
}
}
return false
}
2018-05-10 18:19:04 +08:00
func (this *Common) FileExists(fileName string) bool {
_, err := os.Stat(fileName)
return err == nil
}
2019-01-01 14:41:57 +08:00
func (this *Common) WriteFile(path string, data string) bool {
2019-01-28 19:51:52 +08:00
if err := ioutil.WriteFile(path, []byte(data), 0775); err == nil {
2019-01-01 14:41:57 +08:00
return true
} else {
return false
}
}
func (this *Common) WriteBinFile(path string, data []byte) bool {
2019-01-28 19:51:52 +08:00
if err := ioutil.WriteFile(path, data, 0775); err == nil {
2019-01-01 14:41:57 +08:00
return true
} else {
return false
}
}
2019-01-02 17:46:30 +08:00
func (this *Common) IsExist(filename string) bool {
_, err := os.Stat(filename)
return err == nil || os.IsExist(err)
}
2019-01-16 15:30:53 +08:00
func (this *Common) Match(matcher string, content string) []string {
var result []string
if reg, err := regexp.Compile(matcher); err == nil {
result = reg.FindAllString(content, -1)
}
return result
}
2019-01-02 17:46:30 +08:00
func (this *Common) ReadBinFile(path string) ([]byte, error) {
if this.IsExist(path) {
fi, err := os.Open(path)
if err != nil {
return nil, err
}
defer fi.Close()
return ioutil.ReadAll(fi)
} else {
return nil, errors.New("not found")
}
}
2018-12-31 17:46:24 +08:00
func (this *Common) RemoveEmptyDir(pathname string) {
2019-01-02 17:46:30 +08:00
defer func() {
if re := recover(); re != nil {
buffer := debug.Stack()
log.Error("postFileToPeer")
log.Error(re)
log.Error(string(buffer))
}
}()
2018-12-31 17:46:24 +08:00
handlefunc := func(file_path string, f os.FileInfo, err error) error {
if f.IsDir() {
files, _ := ioutil.ReadDir(file_path)
2019-01-02 17:46:30 +08:00
if len(files) == 0 && file_path != pathname {
2018-12-31 17:46:24 +08:00
os.Remove(file_path)
}
}
return nil
}
fi, _ := os.Stat(pathname)
if fi.IsDir() {
filepath.Walk(pathname, handlefunc)
}
}
2019-01-17 21:46:29 +08:00
// JsonEncodePretty marshals o to JSON and re-indents it with two
// spaces. A string argument is treated as already-encoded JSON; a
// value that fails to marshal (or re-parse) is returned as-is, which
// for non-strings means "".
func (this *Common) JsonEncodePretty(o interface{}) string {
	resp := ""
	switch o.(type) {
	case map[string]interface{}:
		if data, err := json.Marshal(o); err == nil {
			resp = string(data)
		}
	case map[string]string:
		if data, err := json.Marshal(o); err == nil {
			resp = string(data)
		}
	case []interface{}:
		if data, err := json.Marshal(o); err == nil {
			resp = string(data)
		}
	case []string:
		if data, err := json.Marshal(o); err == nil {
			resp = string(data)
		}
	case string:
		resp = o.(string)
	default:
		if data, err := json.Marshal(o); err == nil {
			resp = string(data)
		}
	}
	// round-trip through an interface{} to produce indented output.
	var v interface{}
	if ok := json.Unmarshal([]byte(resp), &v); ok == nil {
		if buf, ok := json.MarshalIndent(v, "", "  "); ok == nil {
			resp = string(buf)
		}
	}
	return resp
}
2018-12-30 18:18:42 +08:00
// GetClientIp extracts the client address from the first populated
// proxy header, falling back to the host part of RemoteAddr.
// Headers are looked up as literal map keys (case-sensitive).
func (this *Common) GetClientIp(r *http.Request) string {
	headerNames := []string{"X_Forwarded_For", "X-Forwarded-For", "X-Real-Ip",
		"X_Real_Ip", "Remote_Addr", "Remote-Addr"}
	clientIp := ""
	for _, name := range headerNames {
		if values, ok := r.Header[name]; ok && len(values) > 0 {
			clientIp = values[0]
			break
		}
	}
	if clientIp == "" {
		clientIp = strings.Split(r.RemoteAddr, ":")[0]
	}
	return clientIp
}
2019-02-25 10:22:51 +08:00
// BackUpMetaDataByDate regenerates the per-day backup files
// (files.md5 and meta.data under DATA_DIR/<date>/) from the logDB
// entries whose keys carry the "<date>_files.md5_" prefix. The md5
// log file path doubles as the lock key so only one backup per date
// runs at a time.
func (this *Server) BackUpMetaDataByDate(date string) {
	defer func() {
		if re := recover(); re != nil {
			buffer := debug.Stack()
			log.Error("BackUpMetaDataByDate")
			log.Error(re)
			log.Error(string(buffer))
		}
	}()
	var (
		err          error
		keyPrefix    string
		msg          string
		name         string
		fileInfo     FileInfo
		logFileName  string
		fileLog      *os.File
		fileMeta     *os.File
		metaFileName string
		fi           os.FileInfo
	)
	logFileName = DATA_DIR + "/" + date + "/" + CONST_FILE_Md5_FILE_NAME
	this.lockMap.LockKey(logFileName)
	defer this.lockMap.UnLockKey(logFileName)
	metaFileName = DATA_DIR + "/" + date + "/" + "meta.data"
	os.MkdirAll(DATA_DIR+"/"+date, 0775)
	// both files are regenerated from scratch below.
	if this.util.IsExist(logFileName) {
		os.Remove(logFileName)
	}
	if this.util.IsExist(metaFileName) {
		os.Remove(metaFileName)
	}
	fileLog, err = os.OpenFile(logFileName, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0664)
	if err != nil {
		log.Error(err)
		return
	}
	defer fileLog.Close()
	fileMeta, err = os.OpenFile(metaFileName, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0664)
	if err != nil {
		log.Error(err)
		return
	}
	defer fileMeta.Close()
	keyPrefix = "%s_%s_"
	keyPrefix = fmt.Sprintf(keyPrefix, date, CONST_FILE_Md5_FILE_NAME)
	iter := server.logDB.NewIterator(util.BytesPrefix([]byte(keyPrefix)), nil)
	defer iter.Release()
	for iter.Next() {
		if err = json.Unmarshal(iter.Value(), &fileInfo); err != nil {
			continue
		}
		// a renamed file is stored on disk under ReName.
		name = fileInfo.Name
		if fileInfo.ReName != "" {
			name = fileInfo.ReName
		}
		// meta.data: "<file md5>\t<json record>"
		msg = fmt.Sprintf("%s\t%s\n", fileInfo.Md5, string(iter.Value()))
		if _, err = fileMeta.WriteString(msg); err != nil {
			log.Error(err)
		}
		// meta.data: same record keyed by md5 of the path, for path lookups.
		msg = fmt.Sprintf("%s\t%s\n", this.util.MD5(fileInfo.Path+"/"+name), string(iter.Value()))
		if _, err = fileMeta.WriteString(msg); err != nil {
			log.Error(err)
		}
		// files.md5: "<md5>|<size>|<timestamp>|<path>"
		msg = fmt.Sprintf("%s|%d|%d|%s\n", fileInfo.Md5, fileInfo.Size, fileInfo.TimeStamp, fileInfo.Path+"/"+name)
		if _, err = fileLog.WriteString(msg); err != nil {
			log.Error(err)
		}
	}
	// remove empty outputs. NOTE(review): the explicit Close here is
	// followed by the deferred Close above, i.e. a double close —
	// harmless for *os.File but worth confirming.
	if fi, err = fileLog.Stat(); err != nil {
		log.Error(err)
	} else if (fi.Size() == 0) {
		fileLog.Close()
		os.Remove(logFileName)
	}
	if fi, err = fileMeta.Stat(); err != nil {
		log.Error(err)
	} else if (fi.Size() == 0) {
		fileMeta.Close()
		os.Remove(metaFileName)
	}
}
2019-02-26 18:51:39 +08:00
// RepairFileInfoFromFile rebuilds file metadata by walking STORE_DIR:
// every regular file not yet recorded in leveldb is hashed, pushed to
// the peers and written to the md5 log. A per-key lock ensures only
// one repair walk runs at a time.
func (this *Server) RepairFileInfoFromFile() {
	defer func() {
		if re := recover(); re != nil {
			buffer := debug.Stack()
			log.Error("RepairFileInfoFromFile")
			log.Error(re)
			log.Error(string(buffer))
		}
	}()
	if this.lockMap.IsLock("RepairFileInfoFromFile") {
		log.Warn("Lock RepairFileInfoFromFile")
		return
	}
	this.lockMap.LockKey("RepairFileInfoFromFile")
	defer this.lockMap.UnLockKey("RepairFileInfoFromFile")
	handlefunc := func(file_path string, f os.FileInfo, err error) error {
		var (
			files    []os.FileInfo
			fi       os.FileInfo
			fileInfo FileInfo
			sum      string
			pathMd5  string
		)
		if f.IsDir() {
			files, err = ioutil.ReadDir(file_path)
			if err != nil {
				return err
			}
			for _, fi = range files {
				if fi.IsDir() || fi.Size() == 0 {
					continue
				}
				// normalize to forward slashes and strip the docker
				// prefix so stored paths match download URLs.
				file_path = strings.Replace(file_path, "\\", "/", -1)
				if DOCKER_DIR != "" {
					file_path = strings.Replace(file_path, DOCKER_DIR, "", 1)
				}
				// haystack volumes hold merged small files; skip them here.
				if strings.HasPrefix(file_path, STORE_DIR_NAME+"/"+LARGE_DIR_NAME) {
					log.Info(fmt.Sprintf("ignore small file file %s", file_path+"/"+fi.Name()))
					continue
				}
				// already known? (keyed by md5 of the path)
				pathMd5 = this.util.MD5(file_path + "/" + fi.Name())
				if finfo, _ := this.GetFileInfoFromLevelDB(pathMd5); finfo != nil && finfo.Md5 != "" {
					log.Info(fmt.Sprintf("exist ignore file %s", file_path+"/"+fi.Name()))
					continue
				}
				sum, err = this.util.GetFileSumByName(file_path+"/"+fi.Name(), Config().FileSumArithmetic)
				if err != nil {
					log.Error(err)
					continue
				}
				fileInfo = FileInfo{
					Size:      fi.Size(),
					Name:      fi.Name(),
					Path:      file_path,
					Md5:       sum,
					TimeStamp: fi.ModTime().Unix(),
					Peers:     []string{this.host},
					OffSet:    -1, // stand-alone file, not inside a haystack volume
				}
				//log.Info(fileInfo)
				log.Info(file_path, fi.Name())
				//this.AppendToQueue(&fileInfo)
				this.postFileToPeer(&fileInfo)
				this.SaveFileMd5Log(&fileInfo, CONST_FILE_Md5_FILE_NAME)
			}
		}
		return nil
	}
	pathname := STORE_DIR
	fi, _ := os.Stat(pathname)
	// NOTE(review): fi is nil when STORE_DIR does not exist; the IsDir
	// call below would then panic (absorbed by the recover) — confirm.
	if fi.IsDir() {
		filepath.Walk(pathname, handlefunc)
	}
	log.Info("RepairFileInfoFromFile is finish.")
}
2019-02-25 10:38:32 +08:00
// RepairStatByDate recounts the file count and total size for date by
// scanning logDB entries with the "<date>_files.md5_" prefix, stores
// the result in statMap, persists it via SaveStat and returns the
// recomputed per-day statistic.
func (this *Server) RepairStatByDate(date string) StatDateFileInfo {
	defer func() {
		if re := recover(); re != nil {
			buffer := debug.Stack()
			log.Error("RepairStatByDate")
			log.Error(re)
			log.Error(string(buffer))
		}
	}()
	var (
		err       error
		keyPrefix string
		fileInfo  FileInfo
		fileCount int64
		fileSize  int64
		stat      StatDateFileInfo
	)
	keyPrefix = "%s_%s_"
	keyPrefix = fmt.Sprintf(keyPrefix, date, CONST_FILE_Md5_FILE_NAME)
	iter := server.logDB.NewIterator(util.BytesPrefix([]byte(keyPrefix)), nil)
	defer iter.Release()
	for iter.Next() {
		// malformed records are skipped silently.
		if err = json.Unmarshal(iter.Value(), &fileInfo); err != nil {
			continue
		}
		fileCount = fileCount + 1
		fileSize = fileSize + fileInfo.Size
	}
	this.statMap.Put(date+"_"+CONST_STAT_FILE_COUNT_KEY, fileCount)
	this.statMap.Put(date+"_"+CONST_STAT_FILE_TOTAL_SIZE_KEY, fileSize)
	this.SaveStat()
	stat.Date = date
	stat.FileCount = fileCount
	stat.TotalSize = fileSize
	return stat
}
2019-02-27 13:12:54 +08:00
// CheckFileExistByMd5 reports whether the file identified by md5s is
// already present locally (metadata in leveldb AND bytes on disk).
// For haystack entries the single status byte at the record's offset
// must be '1'. The record's Path must match fileInfo.Path to guard
// against the same content being uploaded under two paths at once.
// important: just for DownloadFromPeer use
func (this *Server) CheckFileExistByMd5(md5s string, fileInfo *FileInfo) bool {
	var (
		err    error
		info   *FileInfo
		fn     string
		name   string
		offset int64
		data   []byte
	)
	if info, err = this.GetFileInfoFromLevelDB(md5s); err != nil {
		return false
	}
	if info == nil || info.Md5 == "" {
		return false
	}
	if info.Path != fileInfo.Path { // upload thee same file at a tiime from two peer
		return false
	}
	// on-disk name is ReName when the file was renamed at upload time.
	fn = info.Name
	if info.ReName != "" {
		fn = info.ReName
	}
	if info.OffSet == -1 {
		// stand-alone file: existence on disk is enough.
		if this.util.FileExists(DOCKER_DIR + info.Path + "/" + fn) {
			return true
		}
	} else { //small file
		// merged file: the name encodes "<volume>,<offset>,<length>".
		if name, offset, _, err = this.ParseSmallFile(fn); err != nil {
			return false
		}
		if !this.util.FileExists(DOCKER_DIR + info.Path + "/" + name) {
			return false
		}
		// the byte at the record's offset is a tombstone marker;
		// '1' means the slot is live.
		if data, err = this.util.ReadFileByOffSet(DOCKER_DIR+info.Path+"/"+name, offset, 1); err != nil {
			return false
		}
		if data[0] == '1' {
			return true
		}
	}
	// NOTE(review): reached when the stand-alone file is missing on
	// disk (or a haystack slot is not '1'); info is non-nil with a
	// matching path here, so this still returns true — confirm that
	// metadata-only presence is intended to count as "exists".
	if info != nil && info.Md5 != "" {
		if fileInfo != nil {
			if fileInfo.Path != info.Path {
				return false
			}
		}
		return true
	} else {
		return false
	}
}
2019-02-24 14:09:06 +08:00
// ParseSmallFile decodes a packed small-file name of the form
// "<container>,<offset>,<length>" (an optional directory prefix is stripped
// first). It returns the container file name, the byte offset and the slice
// length; on malformed input it returns the (stripped) filename together
// with -1 sentinels and a non-nil error.
//
// Fix: error message typo "unvalid" -> "invalid".
func (this *Server) ParseSmallFile(filename string) (string, int64, int, error) {
	var (
		err    error
		offset int64
		length int
	)
	err = errors.New("invalid small file")
	if len(filename) < 3 {
		return filename, -1, -1, err
	}
	// only the base name carries the container,offset,length triple
	if strings.Contains(filename, "/") {
		filename = filename[strings.LastIndex(filename, "/")+1:]
	}
	pos := strings.Split(filename, ",")
	if len(pos) < 3 {
		return filename, -1, -1, err
	}
	offset, err = strconv.ParseInt(pos[1], 10, 64)
	if err != nil {
		return filename, -1, -1, err
	}
	if length, err = strconv.Atoi(pos[2]); err != nil {
		return filename, offset, -1, err
	}
	// a "small" file must fit the configured slot size and sit at a valid offset
	if length > CONST_SMALL_FILE_SIZE || offset < 0 {
		err = errors.New("invalid filesize or offset")
		return filename, -1, -1, err
	}
	return pos[0], offset, length, nil
}
2019-01-02 17:46:30 +08:00
func (this *Server) DownloadFromPeer(peer string, fileInfo *FileInfo) {
var (
2019-02-28 15:19:02 +08:00
err error
filename string
fpath string
fi os.FileInfo
sum string
data []byte
downloadUrl string
2019-01-02 17:46:30 +08:00
)
2019-02-22 16:23:59 +08:00
filename = fileInfo.Name
if fileInfo.ReName != "" {
filename = fileInfo.ReName
}
2019-02-24 14:09:06 +08:00
if this.CheckFileExistByMd5(fileInfo.Md5, fileInfo) {
2019-01-21 15:07:42 +08:00
return
}
2019-01-02 17:46:30 +08:00
if _, err = os.Stat(fileInfo.Path); err != nil {
2019-02-15 17:48:55 +08:00
os.MkdirAll(DOCKER_DIR+fileInfo.Path, 0775)
2019-01-02 17:46:30 +08:00
}
2019-02-16 10:24:46 +08:00
//fmt.Println("downloadFromPeer",fileInfo)
p := strings.Replace(fileInfo.Path, STORE_DIR_NAME+"/", "", 1)
2019-01-21 15:07:42 +08:00
//filename=this.util.UrlEncode(filename)
2019-02-28 15:19:02 +08:00
downloadUrl = peer + "/" + Config().Group + "/" + p + "/" + filename
log.Info("DownloadFromPeer: ", downloadUrl)
req := httplib.Get(downloadUrl)
2019-02-16 10:24:46 +08:00
fpath = DOCKER_DIR + fileInfo.Path + "/" + filename
2019-02-19 16:14:31 +08:00
timeout := fileInfo.Size/1024/1024/8 + 30
2019-02-27 17:13:25 +08:00
req.SetTimeout(time.Second*30, time.Second*time.Duration(timeout))
2019-02-14 19:02:51 +08:00
if fileInfo.OffSet != -1 { //small file download
data, err = req.Bytes()
if err != nil {
log.Error(err)
return
}
2019-02-15 11:35:49 +08:00
data2 := make([]byte, len(data)+1)
data2[0] = '1'
for i, v := range data {
data2[i+1] = v
2019-02-14 19:02:51 +08:00
}
2019-02-15 11:35:49 +08:00
data = data2
2019-02-14 19:02:51 +08:00
if int64(len(data)) != fileInfo.Size {
log.Warn("file size is error")
return
}
fpath = strings.Split(fpath, ",")[0]
err = this.util.WriteFileByOffSet(fpath, fileInfo.OffSet, data)
if err != nil {
log.Warn(err)
}
2019-02-15 11:35:49 +08:00
this.SaveFileMd5Log(fileInfo, CONST_FILE_Md5_FILE_NAME)
2019-02-14 19:02:51 +08:00
return
}
2019-01-08 18:24:51 +08:00
if err = req.ToFile(fpath); err != nil {
2019-01-02 17:46:30 +08:00
log.Error(err)
2019-02-12 18:35:26 +08:00
return
2019-01-02 17:46:30 +08:00
}
2019-01-08 18:24:51 +08:00
if fi, err = os.Stat(fpath); err != nil {
os.Remove(fpath)
return
}
2019-02-13 10:09:41 +08:00
if sum, err = this.util.GetFileSumByName(fpath, Config().FileSumArithmetic); err != nil {
2019-02-12 18:35:26 +08:00
log.Error(err)
return
}
2019-02-13 10:09:41 +08:00
if fi.Size() != fileInfo.Size || sum != fileInfo.Md5 {
2019-02-12 18:35:26 +08:00
log.Error("file sum check error")
2019-01-08 18:24:51 +08:00
os.Remove(fpath)
2019-02-12 18:35:26 +08:00
return
2019-01-08 18:24:51 +08:00
}
2019-01-31 12:50:03 +08:00
if this.util.IsExist(fpath) {
this.SaveFileMd5Log(fileInfo, CONST_FILE_Md5_FILE_NAME)
}
2019-01-02 17:46:30 +08:00
}
2018-05-10 18:19:04 +08:00
func (this *Server) Download(w http.ResponseWriter, r *http.Request) {
2019-01-01 23:31:14 +08:00
var (
2019-01-09 17:01:01 +08:00
err error
pathMd5 string
info os.FileInfo
peer string
fileInfo *FileInfo
fullpath string
pathval url.Values
token string
timestamp string
maxTimestamp int64
minTimestamp int64
ts int64
md5sum string
fp *os.File
isPeer bool
2019-02-14 19:02:51 +08:00
isSmallFile bool
data []byte
offset int64
length int
smallPath string
2019-02-15 11:35:49 +08:00
notFound bool
2019-02-19 11:24:06 +08:00
//isBigFile bool
2019-01-01 23:31:14 +08:00
)
2019-01-09 17:01:01 +08:00
r.ParseForm()
isPeer = this.IsPeer(r)
if Config().DownloadUseToken && !isPeer {
2019-01-09 17:01:01 +08:00
token = r.FormValue("token")
timestamp = r.FormValue("timestamp")
if token == "" || timestamp == "" {
w.Write([]byte("unvalid request"))
return
}
maxTimestamp = time.Now().Add(time.Second *
time.Duration(Config().DownloadTokenExpire)).Unix()
minTimestamp = time.Now().Add(-time.Second *
time.Duration(Config().DownloadTokenExpire)).Unix()
if ts, err = strconv.ParseInt(timestamp, 10, 64); err != nil {
w.Write([]byte("unvalid timestamp"))
return
}
if ts > maxTimestamp || ts < minTimestamp {
w.Write([]byte("timestamp expire"))
return
}
}
2019-01-02 17:46:30 +08:00
fullpath = r.RequestURI[len(Config().Group)+2 : len(r.RequestURI)]
2019-02-16 10:24:46 +08:00
fullpath = DOCKER_DIR + STORE_DIR_NAME + "/" + fullpath
//fmt.Println("fullpath",fullpath)
2019-02-14 19:02:51 +08:00
if strings.HasPrefix(r.RequestURI, "/"+Config().Group+"/"+LARGE_DIR_NAME+"/") {
isSmallFile = true
2019-02-16 10:24:46 +08:00
smallPath = fullpath //notice order
2019-02-14 19:02:51 +08:00
fullpath = strings.Split(fullpath, ",")[0]
}
_ = isSmallFile
_ = smallPath
2019-02-15 17:48:55 +08:00
fullpath = strings.Replace(fullpath, "&", "$$$$", -1)
2019-01-02 20:37:50 +08:00
if pathval, err = url.ParseQuery(fullpath); err != nil {
log.Error(err)
} else {
2019-02-15 17:48:55 +08:00
for k, v := range pathval {
2019-01-02 20:37:50 +08:00
if k != "" {
2019-02-15 17:48:55 +08:00
if len(v) > 0 && v[0] != "" {
fullpath = k + "=" + v[0]
} else {
fullpath = k
}
2019-01-02 20:37:50 +08:00
}
}
}
2019-02-15 17:48:55 +08:00
fullpath = strings.Replace(fullpath, "$$$$", "&", -1)
2019-01-09 17:01:01 +08:00
CheckToken := func(token string, md5sum string, timestamp string) bool {
if this.util.MD5(md5sum+timestamp) != token {
return false
}
return true
}
if Config().DownloadUseToken && !isPeer {
2019-02-14 19:02:51 +08:00
if isSmallFile {
pathMd5 = this.util.MD5(smallPath)
} else {
fullpath = strings.Split(fullpath, "?")[0]
pathMd5 = this.util.MD5(fullpath)
}
2019-01-09 17:01:01 +08:00
if fileInfo, err = this.GetFileInfoFromLevelDB(pathMd5); err != nil {
log.Error(err)
if this.util.FileExists(fullpath) {
if fp, err = os.Create(fullpath); err != nil {
log.Error(err)
}
if fp != nil {
defer fp.Close()
}
2019-01-23 21:39:13 +08:00
md5sum = this.util.GetFileSum(fp, Config().FileSumArithmetic)
2019-01-09 17:01:01 +08:00
if !CheckToken(token, md5sum, timestamp) {
w.Write([]byte("unvalid request,error token"))
return
}
}
} else {
if !CheckToken(token, fileInfo.Md5, timestamp) {
w.Write([]byte("unvalid request,error token"))
return
}
}
}
2019-02-14 19:02:51 +08:00
if isSmallFile {
2019-02-24 14:09:06 +08:00
if _, offset, length, err = this.ParseSmallFile(r.RequestURI); err != nil {
log.Error(err)
2019-02-27 13:12:54 +08:00
w.Write([]byte(err.Error()))
2019-02-14 19:02:51 +08:00
return
}
if info, err = os.Stat(fullpath); err != nil {
2019-02-22 16:23:59 +08:00
notFound = true
goto NotFound // if return can't not repair file
2019-02-14 19:02:51 +08:00
return
}
2019-02-15 11:35:49 +08:00
if info.Size() < offset+int64(length) {
notFound = true
2019-02-14 19:02:51 +08:00
} else {
data, err = this.util.ReadFileByOffSet(fullpath, offset, length)
if err != nil {
log.Error(err)
return
}
2019-02-15 11:35:49 +08:00
if string(data[0]) == "1" {
2019-02-14 19:02:51 +08:00
w.Write(data[1:])
return
} else {
2019-02-15 11:35:49 +08:00
notFound = true
2019-02-14 19:02:51 +08:00
}
}
}
2019-02-22 16:23:59 +08:00
NotFound:
2019-02-24 14:09:06 +08:00
if info, err = os.Stat(fullpath); err != nil || info.Size() == 0 || notFound {
2019-02-27 17:13:25 +08:00
log.Error(err, fullpath, smallPath)
2019-02-15 11:35:49 +08:00
if isSmallFile && notFound {
2019-02-14 19:02:51 +08:00
pathMd5 = this.util.MD5(smallPath)
} else {
2019-02-27 17:13:25 +08:00
if err == nil && Config().ShowDir && info.IsDir() {
2019-02-27 13:12:54 +08:00
goto SHOW_DIR
}
2019-02-14 19:02:51 +08:00
pathMd5 = this.util.MD5(fullpath)
}
2019-01-02 17:46:30 +08:00
for _, peer = range Config().Peers {
if fileInfo, err = this.checkPeerFileExist(peer, pathMd5); err != nil {
log.Error(err)
continue
}
if fileInfo.Md5 != "" {
if Config().DownloadUseToken && !isPeer {
2019-01-09 17:01:01 +08:00
if !CheckToken(token, fileInfo.Md5, timestamp) {
w.Write([]byte("unvalid request,error token"))
return
}
}
2019-01-02 17:46:30 +08:00
go this.DownloadFromPeer(peer, fileInfo)
http.Redirect(w, r, peer+r.RequestURI, 302)
2019-01-16 10:28:51 +08:00
return
2019-01-01 23:31:14 +08:00
}
}
2019-01-08 18:24:51 +08:00
w.WriteHeader(404)
2019-01-01 23:31:14 +08:00
return
}
2019-02-27 13:12:54 +08:00
SHOW_DIR:
2019-01-01 23:31:14 +08:00
if !Config().ShowDir && info.IsDir() {
2019-01-03 10:08:01 +08:00
w.Write([]byte("list dir deny"))
2019-01-01 23:31:14 +08:00
return
}
2018-05-10 18:19:04 +08:00
log.Info("download:" + r.RequestURI)
staticHandler.ServeHTTP(w, r)
}
2018-12-30 17:17:40 +08:00
// GetServerURI returns the base URL ("http://<host>/") of the server that
// received the request.
func (this *Server) GetServerURI(r *http.Request) string {
	return "http://" + r.Host + "/"
}
2019-02-01 11:31:19 +08:00
// CheckFileAndSendToPeer re-queues every file logged on the given date in the
// given log (e.g. the error log or the queue log) for replication. When
// isForceUpload is set, each file's known peer list is cleared first so it is
// pushed to every peer again. Files whose md5 came from CONST_Md5_QUEUE_FILE_NAME
// go to the download queue; everything else goes to the upload queue.
func (this *Server) CheckFileAndSendToPeer(date string, filename string, isForceUpload bool) {
	var (
		md5set mapset.Set
		err    error
		md5s   []interface{}
	)
	// this runs as a background job; never let a panic kill the process
	defer func() {
		if re := recover(); re != nil {
			buffer := debug.Stack()
			log.Error("CheckFileAndSendToPeer")
			log.Error(re)
			log.Error(string(buffer))
		}
	}()
	if md5set, err = this.GetMd5sByDate(date, filename); err != nil {
		log.Error(err)
		return
	}
	md5s = md5set.ToSlice()
	for _, md := range md5s {
		if md == nil {
			continue
		}
		// lookup error is deliberately ignored: a missing record just skips the entry
		if fileInfo, _ := this.GetFileInfoFromLevelDB(md.(string)); fileInfo != nil && fileInfo.Md5 != "" {
			if isForceUpload {
				fileInfo.Peers = []string{}
			}
			// already known on more hosts than the cluster has peers: nothing to do
			if len(fileInfo.Peers) > len(Config().Peers) {
				continue
			}
			if !this.util.Contains(this.host, fileInfo.Peers) {
				fileInfo.Peers = append(fileInfo.Peers, this.host) // peer is null
			}
			if filename == CONST_Md5_QUEUE_FILE_NAME {
				this.AppendToDownloadQueue(fileInfo)
			} else {
				this.AppendToQueue(fileInfo)
			}
		}
	}
}
2019-01-18 22:49:09 +08:00
// postFileToPeer pushes fileInfo's metadata to every peer that does not yet
// have the file (the peer later pulls the bytes itself via syncfile_info).
// Side effects: fileInfo.Peers is extended and persisted to leveldb for each
// peer that already has, or successfully accepts, the file; failures are
// appended to the md5 error log for retry.
func (this *Server) postFileToPeer(fileInfo *FileInfo) {
	var (
		err      error
		peer     string
		filename string
		info     *FileInfo
		postURL  string
		result   string
		fi       os.FileInfo
		i        int
		data     []byte
		fpath    string
	)
	// background worker: never let a panic kill the process
	defer func() {
		if re := recover(); re != nil {
			buffer := debug.Stack()
			log.Error("postFileToPeer")
			log.Error(re)
			log.Error(string(buffer))
		}
	}()
	//fmt.Println("postFile",fileInfo)
	for i, peer = range Config().Peers {
		_ = i
		if fileInfo.Peers == nil {
			fileInfo.Peers = []string{}
		}
		// this peer already has the file
		if this.util.Contains(peer, fileInfo.Peers) {
			continue
		}
		filename = fileInfo.Name
		if fileInfo.ReName != "" {
			filename = fileInfo.ReName
			// small files: ReName is "<container>,<offset>,<length>"; the
			// on-disk name is just the container part
			if fileInfo.OffSet != -1 {
				filename = strings.Split(fileInfo.ReName, ",")[0]
			}
		}
		fpath = DOCKER_DIR + fileInfo.Path + "/" + filename
		if !this.util.FileExists(fpath) {
			log.Warn(fmt.Sprintf("file '%s' not found", fpath))
			continue
		} else {
			// backfill Size lazily from the filesystem when the record lacks it
			if fileInfo.Size == 0 {
				if fi, err = os.Stat(fpath); err != nil {
					log.Error(err)
				} else {
					fileInfo.Size = fi.Size()
				}
			}
		}
		// err from checkPeerFileExist is intentionally unused here: a non-empty
		// Md5 in the answer is the only signal that the peer already has the file
		if info, err = this.checkPeerFileExist(peer, fileInfo.Md5); info.Md5 != "" {
			fileInfo.Peers = append(fileInfo.Peers, peer)
			if _, err = this.SaveFileInfoToLevelDB(fileInfo.Md5, fileInfo, this.ldb); err != nil {
				log.Error(err)
			}
			continue
		}
		postURL = fmt.Sprintf("%s%s", peer, this.getRequestURI("syncfile_info"))
		b := httplib.Post(postURL)
		b.SetTimeout(time.Second*30, time.Second*30)
		if data, err = json.Marshal(fileInfo); err != nil {
			log.Error(err)
			return
		}
		b.Param("fileInfo", string(data))
		result, err = b.String()
		// the peer answers with the file's download URL on success; anything
		// else is recorded in the error log so the push can be retried
		if !strings.HasPrefix(result, "http://") || err != nil {
			this.SaveFileMd5Log(fileInfo, CONST_Md5_ERROR_FILE_NAME)
		}
		if strings.HasPrefix(result, "http://") {
			log.Info(result)
			if !this.util.Contains(peer, fileInfo.Peers) {
				fileInfo.Peers = append(fileInfo.Peers, peer)
				if _, err = this.SaveFileInfoToLevelDB(fileInfo.Md5, fileInfo, this.ldb); err != nil {
					log.Error(err)
				}
			}
		}
		if err != nil {
			log.Error(err)
		}
	}
}
2019-01-03 10:08:01 +08:00
func (this *Server) SaveFileMd5Log(fileInfo *FileInfo, filename string) {
2019-02-25 16:24:28 +08:00
var (
info FileInfo
)
2019-02-28 15:19:02 +08:00
for len(this.queueFileLog)+len(this.queueFileLog)/10 > CONST_QUEUE_SIZE {
time.Sleep(time.Second * 1)
}
2019-02-25 16:24:28 +08:00
info = *fileInfo
this.queueFileLog <- &FileLog{FileInfo: &info, FileName: filename}
2019-02-23 10:07:00 +08:00
}
// saveFileMd5Log is the queue consumer side of SaveFileMd5Log. It records
// fileInfo in the per-day log database under "<date>_<logname>_<md5>" and,
// for the add/remove logs, also maintains the per-day file count / total
// size statistics and the main md5->FileInfo and path->FileInfo indexes.
func (this *Server) saveFileMd5Log(fileInfo *FileInfo, filename string) {
	var (
		err      error
		outname  string
		logDate  string
		ok       bool
		fullpath string
		md5Path  string
		logKey   string
	)
	// queue consumer: never let a panic kill the worker
	defer func() {
		if re := recover(); re != nil {
			buffer := debug.Stack()
			log.Error("saveFileMd5Log")
			log.Error(re)
			log.Error(string(buffer))
		}
	}()
	if fileInfo == nil || fileInfo.Md5 == "" || filename == "" {
		log.Warn("saveFileMd5Log", fileInfo, filename)
		return
	}
	logDate = this.util.GetDayFromTimeStamp(fileInfo.TimeStamp)
	outname = fileInfo.Name
	if fileInfo.ReName != "" {
		outname = fileInfo.ReName
	}
	fullpath = fileInfo.Path + "/" + outname
	logKey = fmt.Sprintf("%s_%s_%s", logDate, filename, fileInfo.Md5)
	if filename == CONST_FILE_Md5_FILE_NAME {
		// only count files not already indexed, so re-logging does not
		// inflate the statistics
		if ok, err = this.IsExistFromLevelDB(fileInfo.Md5, this.ldb); !ok {
			this.statMap.AddCountInt64(logDate+"_"+CONST_STAT_FILE_COUNT_KEY, 1)
			this.statMap.AddCountInt64(logDate+"_"+CONST_STAT_FILE_TOTAL_SIZE_KEY, fileInfo.Size)
			this.SaveStat()
		}
		if _, err = this.SaveFileInfoToLevelDB(logKey, fileInfo, this.logDB); err != nil {
			log.Error(err)
		}
		// index by md5 and by MD5(path) so lookups work either way
		if _, err := this.SaveFileInfoToLevelDB(fileInfo.Md5, fileInfo, this.ldb); err != nil {
			log.Error("saveToLevelDB", err, fileInfo)
		}
		if _, err = this.SaveFileInfoToLevelDB(this.util.MD5(fullpath), fileInfo, this.ldb); err != nil {
			log.Error("saveToLevelDB", err, fileInfo)
		}
		return
	}
	if filename == CONST_REMOME_Md5_FILE_NAME {
		// mirror of the add branch: only adjust stats if the file was indexed
		if ok, err = this.IsExistFromLevelDB(fileInfo.Md5, this.ldb); ok {
			this.statMap.AddCountInt64(logDate+"_"+CONST_STAT_FILE_COUNT_KEY, -1)
			this.statMap.AddCountInt64(logDate+"_"+CONST_STAT_FILE_TOTAL_SIZE_KEY, -fileInfo.Size)
			this.SaveStat()
		}
		this.RemoveKeyFromLevelDB(logKey, this.logDB)
		md5Path = this.util.MD5(fullpath)
		if err := this.RemoveKeyFromLevelDB(fileInfo.Md5, this.ldb); err != nil {
			log.Error("RemoveKeyFromLevelDB", err, fileInfo)
		}
		if err = this.RemoveKeyFromLevelDB(md5Path, this.ldb); err != nil {
			log.Error("RemoveKeyFromLevelDB", err, fileInfo)
		}
		return
	}
	// any other log name (queue/error logs): just record the entry
	this.SaveFileInfoToLevelDB(logKey, fileInfo, this.logDB)
}
2018-12-30 23:31:42 +08:00
func (this *Server) checkPeerFileExist(peer string, md5sum string) (*FileInfo, error) {
var (
2019-01-16 12:08:47 +08:00
err error
fileInfo FileInfo
2018-12-30 23:31:42 +08:00
)
2019-02-12 12:55:55 +08:00
req := httplib.Post(fmt.Sprintf("%s%s?md5=%s", peer, this.getRequestURI("check_file_exist"), md5sum))
2019-01-23 21:39:13 +08:00
req.SetTimeout(time.Second*5, time.Second*10)
2019-01-16 12:08:47 +08:00
if err = req.ToJSON(&fileInfo); err != nil {
return &FileInfo{}, err
}
if fileInfo.Md5 == "" {
return &fileInfo, errors.New("not found")
2018-12-30 23:31:42 +08:00
}
2019-01-16 12:08:47 +08:00
return &fileInfo, nil
2018-12-30 23:31:42 +08:00
}
2018-12-30 17:17:40 +08:00
// CheckFileExist answers ?md5=... queries (used by peers via
// checkPeerFileExist): it writes the stored FileInfo as JSON when the file
// is known — and, for normal files, still present on disk — otherwise an
// empty FileInfo{}.
func (this *Server) CheckFileExist(w http.ResponseWriter, r *http.Request) {
	var (
		data     []byte
		err      error
		fileInfo *FileInfo
		fpath    string
	)
	r.ParseForm()
	md5sum := ""
	md5sum = r.FormValue("md5")
	// lookup error is deliberately ignored: a leveldb miss just means "unknown"
	if fileInfo, err = this.GetFileInfoFromLevelDB(md5sum); fileInfo != nil {
		// small files (OffSet != -1) live inside a packed container file,
		// so no per-file disk check is done here
		if fileInfo.OffSet != -1 {
			if data, err = json.Marshal(fileInfo); err != nil {
				log.Error(err)
			}
			w.Write(data)
			return
		}
		fpath = DOCKER_DIR + fileInfo.Path + "/" + fileInfo.Name
		if fileInfo.ReName != "" {
			fpath = DOCKER_DIR + fileInfo.Path + "/" + fileInfo.ReName
		}
		if this.util.IsExist(fpath) {
			if data, err = json.Marshal(fileInfo); err == nil {
				w.Write(data)
				return
			} else {
				log.Error(err)
			}
		} else {
			if fileInfo.OffSet == -1 {
				this.RemoveKeyFromLevelDB(md5sum, this.ldb) // when file delete,delete from leveldb
			}
		}
	}
	// fall-through for every "not found" path: answer with an empty record
	data, _ = json.Marshal(FileInfo{})
	w.Write(data)
	return
}
func (this *Server) Sync(w http.ResponseWriter, r *http.Request) {
2019-02-12 12:55:55 +08:00
var (
result JsonResult
)
r.ParseForm()
result.Status = "fail"
2019-01-18 22:49:09 +08:00
if !this.IsPeer(r) {
2019-02-12 12:55:55 +08:00
result.Message = "client must be in cluster"
w.Write([]byte(this.util.JsonEncodePretty(result)))
2019-01-18 22:49:09 +08:00
return
}
2018-12-30 17:17:40 +08:00
date := ""
2018-12-30 23:31:42 +08:00
force := ""
2019-02-24 18:23:52 +08:00
inner := ""
2019-02-01 11:31:19 +08:00
isForceUpload := false
force = r.FormValue("force")
date = r.FormValue("date")
2019-02-24 18:23:52 +08:00
inner = r.FormValue("inner")
2019-01-17 21:46:29 +08:00
if force == "1" {
2019-02-01 11:31:19 +08:00
isForceUpload = true
2018-12-30 23:31:42 +08:00
}
2019-02-24 18:23:52 +08:00
if inner != "1" {
for _, peer := range Config().Peers {
req := httplib.Post(peer + this.getRequestURI("sync"))
req.Param("force", force)
req.Param("inner", "1")
req.Param("date", date)
if _, err := req.String(); err != nil {
log.Error(err)
}
}
}
if date == "" {
2019-02-12 12:55:55 +08:00
result.Message = "require paramete date &force , ?date=20181230"
w.Write([]byte(this.util.JsonEncodePretty(result)))
2018-12-30 17:17:40 +08:00
return
}
date = strings.Replace(date, ".", "", -1)
2019-02-01 11:31:19 +08:00
if isForceUpload {
go this.CheckFileAndSendToPeer(date, CONST_FILE_Md5_FILE_NAME, isForceUpload)
2019-01-19 10:35:42 +08:00
} else {
2019-02-01 11:31:19 +08:00
go this.CheckFileAndSendToPeer(date, CONST_Md5_ERROR_FILE_NAME, isForceUpload)
2018-12-30 17:17:40 +08:00
}
2019-02-12 12:55:55 +08:00
result.Status = "ok"
result.Message = "job is running"
w.Write([]byte(this.util.JsonEncodePretty(result)))
2018-12-30 17:17:40 +08:00
}
2019-02-24 14:09:06 +08:00
func (this *Server) IsExistFromLevelDB(key string, db *leveldb.DB) (bool, error) {
return db.Has([]byte(key), nil)
2019-02-23 10:07:00 +08:00
}
2019-01-01 23:31:14 +08:00
func (this *Server) GetFileInfoFromLevelDB(key string) (*FileInfo, error) {
var (
2019-02-25 10:38:32 +08:00
err error
data []byte
2019-01-01 23:31:14 +08:00
fileInfo FileInfo
)
2019-01-16 10:28:51 +08:00
if data, err = this.ldb.Get([]byte(key), nil); err != nil {
2019-01-01 23:31:14 +08:00
return nil, err
}
if err = json.Unmarshal(data, &fileInfo); err != nil {
return nil, err
}
return &fileInfo, nil
}
2019-01-02 17:46:30 +08:00
func (this *Server) SaveStat() {
2019-01-16 10:28:51 +08:00
SaveStatFunc := func() {
defer func() {
if re := recover(); re != nil {
buffer := debug.Stack()
log.Error("SaveStatFunc")
log.Error(re)
log.Error(string(buffer))
}
}()
2019-01-18 10:32:25 +08:00
stat := this.statMap.Get()
2019-01-19 20:44:54 +08:00
if v, ok := stat[CONST_STAT_FILE_COUNT_KEY]; ok {
2019-01-16 10:28:51 +08:00
switch v.(type) {
2019-01-20 11:05:22 +08:00
case int64, int32, int, float64, float32:
if v.(int64) >= 0 {
2019-01-16 10:28:51 +08:00
if data, err := json.Marshal(stat); err != nil {
log.Error(err)
} else {
this.util.WriteBinFile(CONST_STAT_FILE_NAME, data)
}
}
2019-01-02 17:46:30 +08:00
}
}
}
2019-01-18 22:49:09 +08:00
SaveStatFunc()
2019-01-02 17:46:30 +08:00
}
2019-02-24 14:09:06 +08:00
func (this *Server) RemoveKeyFromLevelDB(key string, db *leveldb.DB) (error) {
2019-01-23 21:39:13 +08:00
var (
2019-01-24 11:43:42 +08:00
err error
2019-01-23 21:39:13 +08:00
)
2019-02-24 14:09:06 +08:00
err = db.Delete([]byte(key), nil)
2019-01-23 21:39:13 +08:00
return err
}
2019-02-24 14:09:06 +08:00
func (this *Server) SaveFileInfoToLevelDB(key string, fileInfo *FileInfo, db *leveldb.DB) (*FileInfo, error) {
2019-01-01 23:31:14 +08:00
var (
err error
data []byte
)
2019-02-25 16:24:28 +08:00
if fileInfo == nil || db == nil {
return nil, errors.New("fileInfo is null or db is null")
}
2019-01-01 23:31:14 +08:00
if data, err = json.Marshal(fileInfo); err != nil {
return fileInfo, err
}
2019-02-24 14:09:06 +08:00
if err = db.Put([]byte(key), data, nil); err != nil {
2019-01-01 23:31:14 +08:00
return fileInfo, err
}
return fileInfo, nil
}
2019-01-03 10:08:01 +08:00
func (this *Server) IsPeer(r *http.Request) bool {
var (
ip string
peer string
bflag bool
)
2019-01-28 17:01:26 +08:00
//return true
2019-01-03 10:08:01 +08:00
ip = this.util.GetClientIp(r)
if ip == "127.0.0.1" || ip == this.util.GetPulicIP() {
2019-01-18 19:05:18 +08:00
return true
}
2019-02-11 16:02:34 +08:00
if this.util.Contains(ip, Config().AdminIps) {
return true
}
2019-01-03 10:08:01 +08:00
ip = "http://" + ip
bflag = false
for _, peer = range Config().Peers {
if strings.HasPrefix(peer, ip) {
bflag = true
break
}
}
return bflag
}
2019-01-18 19:05:18 +08:00
func (this *Server) ReceiveMd5s(w http.ResponseWriter, r *http.Request) {
var (
err error
2019-01-28 17:01:26 +08:00
md5str string
2019-01-18 19:05:18 +08:00
fileInfo *FileInfo
2019-01-28 17:01:26 +08:00
md5s []string
2019-01-18 19:05:18 +08:00
)
if !this.IsPeer(r) {
log.Warn(fmt.Sprintf("ReceiveMd5s %s", this.util.GetClientIp(r)))
2019-02-11 16:02:34 +08:00
w.Write([]byte(this.GetClusterNotPermitMessage(r)))
2019-01-18 19:05:18 +08:00
return
}
r.ParseForm()
2019-01-28 17:01:26 +08:00
md5str = r.FormValue("md5s")
md5s = strings.Split(md5str, ",")
AppendFunc := func(md5s []string) {
for _, m := range md5s {
if m != "" {
if fileInfo, err = this.GetFileInfoFromLevelDB(m); err != nil {
log.Error(err)
continue
}
this.AppendToQueue(fileInfo)
}
}
2019-01-18 19:05:18 +08:00
}
2019-01-28 17:01:26 +08:00
go AppendFunc(md5s)
2019-01-18 19:05:18 +08:00
}
2019-02-12 12:55:55 +08:00
func (this *Server) GetClusterNotPermitMessage(r *http.Request) string {
var (
message string
2019-02-11 16:02:34 +08:00
)
2019-02-12 12:55:55 +08:00
message = fmt.Sprintf(CONST_MESSAGE_CLUSTER_IP, this.util.GetClientIp(r))
return message
2019-02-11 16:02:34 +08:00
}
2019-01-18 19:05:18 +08:00
func (this *Server) GetMd5sForWeb(w http.ResponseWriter, r *http.Request) {
var (
date string
err error
2019-01-18 19:05:18 +08:00
result mapset.Set
lines []string
2019-01-28 17:01:26 +08:00
md5s []interface{}
2019-01-18 19:05:18 +08:00
)
if !this.IsPeer(r) {
2019-02-11 16:02:34 +08:00
w.Write([]byte(this.GetClusterNotPermitMessage(r)))
2019-01-18 19:05:18 +08:00
return
}
date = r.FormValue("date")
if result, err = this.GetMd5sByDate(date, CONST_FILE_Md5_FILE_NAME); err != nil {
log.Error(err)
return
}
2019-01-28 17:01:26 +08:00
md5s = result.ToSlice()
for _, line := range md5s {
if line != nil && line != "" {
lines = append(lines, line.(string))
}
}
w.Write([]byte( strings.Join(lines, ",") ))
2019-01-18 19:05:18 +08:00
}
func (this *Server) GetMd5File(w http.ResponseWriter, r *http.Request) {
var (
date string
2019-01-18 19:05:18 +08:00
fpath string
data []byte
err error
2019-01-18 19:05:18 +08:00
)
if !this.IsPeer(r) {
return
}
fpath = DATA_DIR + "/" + date + "/" + CONST_FILE_Md5_FILE_NAME
2019-01-18 19:05:18 +08:00
if !this.util.FileExists(fpath) {
w.WriteHeader(404)
return
}
if data, err = ioutil.ReadFile(fpath); err != nil {
2019-01-18 19:05:18 +08:00
w.WriteHeader(500)
return
}
w.Write(data)
}
2019-01-20 23:45:16 +08:00
// GetMd5sMapByDate loads a per-day md5 log file (CONST_FILE_Md5_FILE_NAME by
// default) and returns a CommonMap containing the first column of every
// well-formed row. Rows are pipe-separated; a row is accepted only if it has
// at least three columns and its second column parses as an integer.
//
// Idiom fix: fmt.Errorf replaces errors.New(fmt.Sprintf(...)).
func (this *Server) GetMd5sMapByDate(date string, filename string) (*CommonMap, error) {
	var (
		err     error
		result  *CommonMap
		fpath   string
		content string
		lines   []string
		line    string
		cols    []string
		data    []byte
	)
	result = &CommonMap{m: make(map[string]interface{})}
	if filename == "" {
		fpath = DATA_DIR + "/" + date + "/" + CONST_FILE_Md5_FILE_NAME
	} else {
		fpath = DATA_DIR + "/" + date + "/" + filename
	}
	if !this.util.FileExists(fpath) {
		return result, fmt.Errorf("fpath %s not found", fpath)
	}
	if data, err = ioutil.ReadFile(fpath); err != nil {
		return result, err
	}
	content = string(data)
	lines = strings.Split(content, "\n")
	for _, line = range lines {
		cols = strings.Split(line, "|")
		// cols[0] is the md5; cols[1] must be numeric (presumably the file
		// size — written elsewhere) for the row to count
		if len(cols) > 2 {
			if _, err = strconv.ParseInt(cols[1], 10, 64); err != nil {
				continue
			}
			result.Add(cols[0])
		}
	}
	return result, nil
}
func (this *Server) GetMd5sByDate(date string, filename string) (mapset.Set, error) {
2019-01-18 19:05:18 +08:00
var (
2019-02-23 23:01:40 +08:00
keyPrefix string
md5set mapset.Set
2019-02-24 14:09:06 +08:00
keys []string
2019-01-18 19:05:18 +08:00
)
2019-02-23 23:01:40 +08:00
md5set = mapset.NewSet()
keyPrefix = "%s_%s_"
keyPrefix = fmt.Sprintf(keyPrefix, date, filename)
2019-02-24 14:09:06 +08:00
iter := server.logDB.NewIterator(util.BytesPrefix([]byte(keyPrefix)), nil)
2019-02-23 23:01:40 +08:00
for iter.Next() {
2019-02-24 14:09:06 +08:00
keys = strings.Split(string(iter.Key()), "_")
if len(keys) >= 3 {
2019-02-23 23:01:40 +08:00
md5set.Add(keys[2])
2019-01-18 19:05:18 +08:00
}
}
2019-02-23 23:01:40 +08:00
iter.Release()
return md5set, nil
2019-01-18 19:05:18 +08:00
}
2019-01-21 15:07:42 +08:00
// SyncFileInfo is the receiving end of postFileToPeer: a peer pushes a
// FileInfo (as the "fileInfo" form value); we log it, queue it for download,
// and answer with the file's download URL — the sender treats an "http://"
// prefixed answer as success.
func (this *Server) SyncFileInfo(w http.ResponseWriter, r *http.Request) {
	var (
		err         error
		fileInfo    FileInfo
		fileInfoStr string
		filename    string
	)
	r.ParseForm()
	if !this.IsPeer(r) {
		return
	}
	fileInfoStr = r.FormValue("fileInfo")
	if err = json.Unmarshal([]byte(fileInfoStr), &fileInfo); err != nil {
		w.Write([]byte(this.GetClusterNotPermitMessage(r)))
		log.Error(err)
		return
	}
	this.SaveFileMd5Log(&fileInfo, CONST_Md5_QUEUE_FILE_NAME)
	this.AppendToDownloadQueue(&fileInfo)
	filename = fileInfo.Name
	if fileInfo.ReName != "" {
		filename = fileInfo.ReName
	}
	// NOTE(review): other paths strip STORE_DIR_NAME+"/" from fileInfo.Path;
	// stripping STORE_DIR here looks inconsistent — confirm the constants'
	// values before changing.
	p := strings.Replace(fileInfo.Path, STORE_DIR+"/", "", 1)
	downloadUrl := fmt.Sprintf("http://%s/%s", r.Host, Config().Group+"/"+p+"/"+filename)
	log.Info("SyncFileInfo: ", downloadUrl)
	w.Write([]byte(downloadUrl))
}
2019-01-08 10:23:13 +08:00
// CheckScene validates an upload scene against the configured whitelist; an
// empty whitelist accepts every scene.
func (this *Server) CheckScene(scene string) (bool, error) {
	scenes := Config().Scenes
	if len(scenes) == 0 {
		return true, nil
	}
	if this.util.Contains(scene, scenes) {
		return true, nil
	}
	return false, errors.New("not valid scene")
}
2019-01-08 16:53:03 +08:00
func (this *Server) RemoveFile(w http.ResponseWriter, r *http.Request) {
var (
err error
md5sum string
fileInfo *FileInfo
fpath string
2019-02-12 12:55:55 +08:00
delUrl string
result JsonResult
inner string
2019-02-15 17:48:55 +08:00
name string
2019-02-11 17:39:00 +08:00
)
2019-02-12 12:55:55 +08:00
_ = delUrl
_ = inner
2019-01-08 16:53:03 +08:00
r.ParseForm()
md5sum = r.FormValue("md5")
2019-02-15 17:48:55 +08:00
fpath = r.FormValue("path")
2019-02-11 17:39:00 +08:00
inner = r.FormValue("inner")
2019-02-12 12:55:55 +08:00
result.Status = "fail"
2019-02-15 17:48:55 +08:00
if fpath != "" && md5sum == "" {
2019-02-22 20:55:28 +08:00
fpath = strings.Replace(fpath, "/"+Config().Group+"/", STORE_DIR_NAME+"/", 1)
2019-02-15 17:48:55 +08:00
md5sum = this.util.MD5(fpath)
}
2019-02-23 10:07:00 +08:00
if inner != "1" {
for _, peer := range Config().Peers {
delFile := func(peer string, md5sum string, fileInfo *FileInfo) {
delUrl = fmt.Sprintf("%s%s", peer, this.getRequestURI("delete"))
req := httplib.Post(delUrl)
req.Param("md5", md5sum)
req.Param("inner", "1")
req.SetTimeout(time.Second*5, time.Second*10)
if _, err = req.String(); err != nil {
log.Error(err)
}
}
go delFile(peer, md5sum, fileInfo)
}
}
2019-01-23 21:39:13 +08:00
if len(md5sum) < 32 {
2019-02-12 12:55:55 +08:00
result.Message = "md5 unvalid"
w.Write([]byte(this.util.JsonEncodePretty(result)))
2019-01-08 16:53:03 +08:00
return
}
2019-01-20 11:05:22 +08:00
if fileInfo, err = this.GetFileInfoFromLevelDB(md5sum); err != nil {
2019-02-22 20:55:28 +08:00
result.Message = err.Error()
w.Write([]byte(this.util.JsonEncodePretty(result)))
2019-01-08 16:53:03 +08:00
return
}
2019-02-22 20:55:28 +08:00
if fileInfo.OffSet != -1 {
2019-02-23 10:07:00 +08:00
result.Message = "small file delete not support"
2019-02-22 20:55:28 +08:00
w.Write([]byte(this.util.JsonEncodePretty(result)))
return
2019-02-15 17:48:55 +08:00
}
2019-02-22 20:55:28 +08:00
name = fileInfo.Name
2019-01-08 16:53:03 +08:00
if fileInfo.ReName != "" {
2019-02-22 20:55:28 +08:00
name = fileInfo.ReName
2019-01-08 16:53:03 +08:00
}
2019-02-22 20:55:28 +08:00
fpath = fileInfo.Path + "/" + name
2019-02-22 16:44:27 +08:00
if fileInfo.Path != "" && this.util.FileExists(DOCKER_DIR+fpath) {
2019-02-23 10:07:00 +08:00
this.SaveFileMd5Log(fileInfo, CONST_REMOME_Md5_FILE_NAME)
2019-02-22 16:44:27 +08:00
if err = os.Remove(DOCKER_DIR + fpath); err != nil {
2019-02-22 20:55:28 +08:00
result.Message = err.Error()
w.Write([]byte(this.util.JsonEncodePretty(result)))
2019-01-08 16:53:03 +08:00
return
} else {
2019-02-12 12:55:55 +08:00
result.Message = "remove success"
result.Status = "ok"
w.Write([]byte(this.util.JsonEncodePretty(result)))
2019-01-08 16:53:03 +08:00
return
}
}
2019-02-12 12:55:55 +08:00
result.Message = "fail remove"
w.Write([]byte(this.util.JsonEncodePretty(result)))
2019-01-08 16:53:03 +08:00
}
2019-02-11 16:02:34 +08:00
func (this *Server) getRequestURI(action string) string {
var (
uri string
)
if Config().SupportGroupManage {
2019-02-12 12:55:55 +08:00
uri = "/" + Config().Group + "/" + action
2019-02-11 16:02:34 +08:00
} else {
uri = "/" + action
}
return uri
}
2019-02-15 17:48:55 +08:00
func (this *Server) BuildFileResult(fileInfo *FileInfo, r *http.Request) FileResult {
var (
outname string
fileResult FileResult
p string
downloadUrl string
domain string
)
if Config().DownloadDomain != "" {
domain = fmt.Sprintf("http://%s", Config().DownloadDomain)
} else {
domain = fmt.Sprintf("http://%s", r.Host)
}
outname = fileInfo.Name
if fileInfo.ReName != "" {
outname = fileInfo.ReName
}
2019-02-16 02:58:33 +08:00
p = strings.Replace(fileInfo.Path, STORE_DIR_NAME+"/", "", 1)
2019-02-15 17:48:55 +08:00
p = Config().Group + "/" + p + "/" + outname
downloadUrl = fmt.Sprintf("http://%s/%s", r.Host, p)
if Config().DownloadDomain != "" {
downloadUrl = fmt.Sprintf("http://%s/%s", Config().DownloadDomain, p)
}
fileResult.Url = downloadUrl
fileResult.Md5 = fileInfo.Md5
fileResult.Path = "/" + p
fileResult.Domain = domain
fileResult.Scene = fileInfo.Scene
// Just for Compatibility
fileResult.Src = fileResult.Path
fileResult.Scenes = fileInfo.Scene
return fileResult
}
func (this *Server) SaveUploadFile(file multipart.File, header *multipart.FileHeader, fileInfo *FileInfo, r *http.Request) (*FileInfo, error) {
var (
err error
outFile *os.File
folder string
fi os.FileInfo
)
defer file.Close()
fileInfo.Name = header.Filename
if Config().RenameFile {
fileInfo.ReName = this.util.MD5(this.util.GetUUID()) + path.Ext(fileInfo.Name)
}
folder = time.Now().Format("20060102/15/04")
if Config().PeerId != "" {
folder = fmt.Sprintf(folder+"/%s", Config().PeerId)
}
if fileInfo.Scene != "" {
folder = fmt.Sprintf(STORE_DIR+"/%s/%s", fileInfo.Scene, folder)
} else {
folder = fmt.Sprintf(STORE_DIR+"/%s", folder)
}
if fileInfo.Path != "" {
if strings.HasPrefix(fileInfo.Path, STORE_DIR) {
folder = fileInfo.Path
} else {
folder = STORE_DIR + "/" + fileInfo.Path
}
}
if !this.util.FileExists(folder) {
os.MkdirAll(folder, 0775)
}
outPath := fmt.Sprintf(folder+"/%s", fileInfo.Name)
if Config().RenameFile {
outPath = fmt.Sprintf(folder+"/%s", fileInfo.ReName)
}
2019-02-27 17:13:25 +08:00
if this.util.FileExists(outPath) && Config().EnableDistinctFile {
2019-02-15 17:48:55 +08:00
for i := 0; i < 10000; i++ {
outPath = fmt.Sprintf(folder+"/%d_%s", i, header.Filename)
fileInfo.Name = fmt.Sprintf("%d_%s", i, header.Filename)
if !this.util.FileExists(outPath) {
break
}
}
}
log.Info(fmt.Sprintf("upload: %s", outPath))
if outFile, err = os.Create(outPath); err != nil {
return fileInfo, err
}
defer outFile.Close()
if err != nil {
log.Error(err)
return fileInfo, errors.New("(error)fail," + err.Error())
}
if _, err = io.Copy(outFile, file); err != nil {
log.Error(err)
return fileInfo, errors.New("(error)fail," + err.Error())
}
if fi, err = outFile.Stat(); err != nil {
log.Error(err)
} else {
fileInfo.Size = fi.Size()
}
if fi.Size() != header.Size {
return fileInfo, errors.New("(error)file uncomplete")
}
v := this.util.GetFileSum(outFile, Config().FileSumArithmetic)
fileInfo.Md5 = v
2019-02-16 02:58:33 +08:00
//fileInfo.Path = folder //strings.Replace( folder,DOCKER_DIR,"",1)
2019-02-16 10:24:46 +08:00
fileInfo.Path = strings.Replace(folder, DOCKER_DIR, "", 1)
2019-02-15 17:48:55 +08:00
fileInfo.Peers = append(fileInfo.Peers, fmt.Sprintf("http://%s", r.Host))
2019-02-16 10:24:46 +08:00
//fmt.Println("upload",fileInfo)
2019-02-15 17:48:55 +08:00
return fileInfo, nil
}
2018-05-10 18:19:04 +08:00
// Upload is the main upload HTTP handler.
//
// POST: receives a multipart "file" plus optional form values md5, output
// (json|text), path (when EnableCustomPath), and scene/scenes. The file is
// stored via SaveUploadFile, optionally merged into a pack file when small,
// logged to the md5 log, and replicated to peers in the background. The
// response is either the download URL (text) or a FileResult JSON.
//
// Non-POST: acts as a "fast upload" lookup — given md5 of an already-stored
// file it returns the existing file's result without transferring data.
func (this *Server) Upload(w http.ResponseWriter, r *http.Request) {
	var (
		err error
		// pathname string
		md5sum       string
		fileInfo     FileInfo
		uploadFile   multipart.File
		uploadHeader *multipart.FileHeader
		scene        string
		output       string
		fileResult   FileResult
		data         []byte
	)
	//r.ParseForm()
	if r.Method == "POST" {
		// name := r.PostFormValue("name")
		// fileInfo.Path = r.Header.Get("Sync-Path")
		md5sum = r.FormValue("md5")
		output = r.FormValue("output")
		//if strings.Contains(r.Host, "127.0.0.1") {
		//	w.Write([]byte( "(error) upload use clust ip(peers ip),not 127.0.0.1"))
		//	return
		//}
		// Caller-chosen storage path, only honored when explicitly enabled.
		if Config().EnableCustomPath {
			fileInfo.Path = r.FormValue("path")
			fileInfo.Path = strings.Trim(fileInfo.Path, "/")
		}
		scene = r.FormValue("scene")
		if scene == "" {
			//Just for Compatibility
			scene = r.FormValue("scenes")
		}
		fileInfo.Md5 = md5sum
		// OffSet == -1 marks a regular (non-packed) file.
		fileInfo.OffSet = -1
		if uploadFile, uploadHeader, err = r.FormFile("file"); err != nil {
			log.Error(err)
			w.Write([]byte(err.Error()))
			return
		}
		fileInfo.Peers = []string{}
		fileInfo.TimeStamp = time.Now().Unix()
		if scene == "" {
			scene = Config().DefaultScene
		}
		if output == "" {
			output = "text"
		}
		if !this.util.Contains(output, []string{"json", "text"}) {
			w.Write([]byte("output just support json or text"))
			return
		}
		fileInfo.Scene = scene
		if _, err = this.CheckScene(scene); err != nil {
			w.Write([]byte(err.Error()))
			return
		}
		if err != nil {
			log.Error(err)
			http.Redirect(w, r, "/", http.StatusMovedPermanently)
			return
		}
		// Write the file to disk; fills Size, Md5, Path, Peers.
		if _, err = this.SaveUploadFile(uploadFile, uploadHeader, &fileInfo, r); err != nil {
			w.Write([]byte(err.Error()))
			return
		}
		// Deduplicate by md5: if the content already exists, drop the copy we
		// just wrote and answer with the existing record.
		if Config().EnableDistinctFile {
			if v, _ := this.GetFileInfoFromLevelDB(fileInfo.Md5); v != nil && v.Md5 != "" {
				fileResult = this.BuildFileResult(v, r)
				if Config().RenameFile {
					os.Remove(DOCKER_DIR + fileInfo.Path + "/" + fileInfo.ReName)
				} else {
					os.Remove(DOCKER_DIR + fileInfo.Path + "/" + fileInfo.Name)
				}
				if output == "json" {
					if data, err = json.Marshal(fileResult); err != nil {
						log.Error(err)
						w.Write([]byte(err.Error()))
					}
					w.Write(data)
				} else {
					w.Write([]byte(fileResult.Url))
				}
				return
			}
		}
		if fileInfo.Md5 == "" {
			log.Warn(" fileInfo.Md5 is null")
			return
		}
		// If the client supplied an md5, it must match the computed one.
		if md5sum != "" && fileInfo.Md5 != md5sum {
			log.Warn(" fileInfo.Md5 and md5sum !=")
			return
		}
		// Small files can be appended into a shared pack file to save inodes.
		if Config().EnableMergeSmallFile && fileInfo.Size < CONST_SMALL_FILE_SIZE {
			if err = this.SaveSmallFile(&fileInfo); err != nil {
				log.Error(err)
				return
			}
		}
		this.saveFileMd5Log(&fileInfo, CONST_FILE_Md5_FILE_NAME) // synchronous md5-log write; may be slow
		// Replicate to peers asynchronously.
		go this.postFileToPeer(&fileInfo)
		if fileInfo.Size <= 0 {
			log.Error("file size is zero")
			return
		}
		fileResult = this.BuildFileResult(&fileInfo, r)
		if output == "json" {
			if data, err = json.Marshal(fileResult); err != nil {
				log.Error(err)
				w.Write([]byte(err.Error()))
			}
			w.Write(data)
		} else {
			w.Write([]byte(fileResult.Url))
		}
		return
	} else {
		// Fast-upload path: look up an existing file by md5 only.
		md5sum = r.FormValue("md5")
		output = r.FormValue("output")
		if md5sum == "" {
			w.Write([]byte("(error) if you want to upload fast md5 is require" +
				",and if you want to upload file,you must use post method "))
			return
		}
		if v, _ := this.GetFileInfoFromLevelDB(md5sum); v != nil && v.Md5 != "" {
			fileResult = this.BuildFileResult(v, r)
			if output == "json" {
				if data, err = json.Marshal(fileResult); err != nil {
					log.Error(err)
					w.Write([]byte(err.Error()))
				}
				w.Write(data)
			} else {
				w.Write([]byte(fileResult.Url))
			}
			return
		}
		w.Write([]byte("(error)fail,please use post method"))
		return
	}
}
2019-02-14 19:02:51 +08:00
func (this *Server) SaveSmallFile(fileInfo *FileInfo) (error) {
var (
err error
filename string
fpath string
srcFile *os.File
desFile *os.File
largeDir string
destPath string
reName string
2019-02-22 16:23:59 +08:00
fileExt string
2019-02-14 19:02:51 +08:00
)
filename = fileInfo.Name
2019-02-22 16:23:59 +08:00
fileExt = path.Ext(filename)
2019-02-14 19:02:51 +08:00
if fileInfo.ReName != "" {
filename = fileInfo.ReName
}
2019-02-16 02:58:33 +08:00
fpath = DOCKER_DIR + fileInfo.Path + "/" + filename
2019-02-14 19:02:51 +08:00
largeDir = LARGE_DIR + "/" + Config().PeerId
if !this.util.FileExists(largeDir) {
os.MkdirAll(largeDir, 0775)
}
2019-02-15 11:35:49 +08:00
reName = fmt.Sprintf("%d", this.util.RandInt(100, 300))
2019-02-14 19:02:51 +08:00
destPath = largeDir + "/" + reName
this.lockMap.LockKey(destPath)
defer this.lockMap.UnLockKey(destPath)
if this.util.FileExists(fpath) {
srcFile, err = os.OpenFile(fpath, os.O_CREATE|os.O_RDONLY, 06666)
if err != nil {
return err
}
defer srcFile.Close()
desFile, err = os.OpenFile(destPath, os.O_CREATE|os.O_RDWR, 06666)
if err != nil {
return err
}
defer desFile.Close()
fileInfo.OffSet, err = desFile.Seek(0, 2)
2019-02-15 11:35:49 +08:00
if _, err = desFile.Write([]byte("1")); err != nil { //first byte set 1
2019-02-14 19:02:51 +08:00
return err
}
fileInfo.OffSet, err = desFile.Seek(0, 2)
if err != nil {
return err
}
2019-02-15 11:35:49 +08:00
fileInfo.OffSet = fileInfo.OffSet - 1 //minus 1 byte
fileInfo.Size = fileInfo.Size + 1
2019-02-22 16:23:59 +08:00
fileInfo.ReName = fmt.Sprintf("%s,%d,%d,%s", reName, fileInfo.OffSet, fileInfo.Size, fileExt)
2019-02-14 19:02:51 +08:00
if _, err = io.Copy(desFile, srcFile); err != nil {
return err
}
srcFile.Close()
os.Remove(fpath)
2019-02-16 10:24:46 +08:00
fileInfo.Path = strings.Replace(largeDir, DOCKER_DIR, "", 1)
2019-02-14 19:02:51 +08:00
}
return nil
}
2019-01-09 12:05:20 +08:00
func (this *Server) SendToMail(to, subject, body, mailtype string) error {
host := Config().Mail.Host
user := Config().Mail.User
password := Config().Mail.Password
hp := strings.Split(host, ":")
auth := smtp.PlainAuth("", user, password, hp[0])
2019-02-01 11:31:19 +08:00
var contentType string
2019-01-09 12:05:20 +08:00
if mailtype == "html" {
2019-02-01 11:31:19 +08:00
contentType = "Content-Type: text/" + mailtype + "; charset=UTF-8"
2019-01-09 12:05:20 +08:00
} else {
2019-02-01 11:31:19 +08:00
contentType = "Content-Type: text/plain" + "; charset=UTF-8"
2019-01-09 12:05:20 +08:00
}
2019-02-01 11:31:19 +08:00
msg := []byte("To: " + to + "\r\nFrom: " + user + ">\r\nSubject: " + "\r\n" + contentType + "\r\n\r\n" + body)
sendTo := strings.Split(to, ";")
err := smtp.SendMail(host, auth, user, sendTo, msg)
2019-01-09 12:05:20 +08:00
return err
}
2019-01-04 10:22:03 +08:00
func (this *Server) BenchMark(w http.ResponseWriter, r *http.Request) {
t := time.Now()
batch := new(leveldb.Batch)
for i := 0; i < 100000000; i++ {
f := FileInfo{}
f.Peers = []string{"http://192.168.0.1", "http://192.168.2.5"}
f.Path = "20190201/19/02"
s := strconv.Itoa(i)
2019-01-18 10:32:25 +08:00
s = this.util.MD5(s)
2019-01-04 10:22:03 +08:00
f.Name = s
f.Md5 = s
if data, err := json.Marshal(&f); err == nil {
batch.Put([]byte(s), data)
}
if i%10000 == 0 {
if batch.Len() > 0 {
2019-01-16 10:28:51 +08:00
server.ldb.Write(batch, nil)
2019-01-04 10:22:03 +08:00
// batch = new(leveldb.Batch)
batch.Reset()
}
fmt.Println(i, time.Since(t).Seconds())
}
2019-01-20 11:05:22 +08:00
//fmt.Println(server.GetFileInfoFromLevelDB(s))
2019-01-04 10:22:03 +08:00
}
2019-01-18 10:32:25 +08:00
this.util.WriteFile("time.txt", time.Since(t).String())
2019-01-04 10:22:03 +08:00
fmt.Println(time.Since(t).String())
}
2019-01-16 15:30:53 +08:00
func (this *Server) RepairStatWeb(w http.ResponseWriter, r *http.Request) {
2019-02-12 12:55:55 +08:00
var (
result JsonResult
2019-02-24 14:09:06 +08:00
date string
2019-02-25 16:24:28 +08:00
inner string
2019-02-12 12:55:55 +08:00
)
2019-02-25 16:24:28 +08:00
if !this.IsPeer(r) {
result.Message = this.GetClusterNotPermitMessage(r)
w.Write([]byte(this.util.JsonEncodePretty(result)))
return
}
2019-02-24 14:09:06 +08:00
date = r.FormValue("date")
2019-02-25 16:24:28 +08:00
inner = r.FormValue("inner")
2019-02-26 16:57:41 +08:00
if ok, err := regexp.MatchString("\\d{8}", date); err != nil || !ok {
result.Message = "invalid date"
w.Write([]byte(this.util.JsonEncodePretty(result)))
return
}
if date == "" || len(date) != 8 {
2019-02-24 14:09:06 +08:00
date = this.util.GetToDay()
2019-02-23 23:01:40 +08:00
}
2019-02-25 16:24:28 +08:00
if inner != "1" {
for _, peer := range Config().Peers {
req := httplib.Post(peer + this.getRequestURI("repair_stat"))
req.Param("inner", "1")
req.Param("date", date)
if _, err := req.String(); err != nil {
log.Error(err)
}
}
}
result.Data = this.RepairStatByDate(date)
2019-02-12 12:55:55 +08:00
result.Status = "ok"
w.Write([]byte(this.util.JsonEncodePretty(result)))
2019-01-16 15:30:53 +08:00
}
2019-01-02 17:46:30 +08:00
func (this *Server) Stat(w http.ResponseWriter, r *http.Request) {
2019-02-12 12:55:55 +08:00
var (
2019-02-26 16:57:41 +08:00
result JsonResult
inner string
echart string
category []string
barCount []int64
barSize []int64
dataMap map[string]interface{}
2019-02-12 12:55:55 +08:00
)
2019-02-25 16:24:28 +08:00
if !this.IsPeer(r) {
result.Message = this.GetClusterNotPermitMessage(r)
w.Write([]byte(this.util.JsonEncodePretty(result)))
return
}
2019-02-12 18:35:26 +08:00
r.ParseForm()
2019-02-13 10:09:41 +08:00
inner = r.FormValue("inner")
2019-02-26 16:57:41 +08:00
echart = r.FormValue("echart")
2019-02-12 18:35:26 +08:00
data := this.GetStat()
2019-02-12 12:55:55 +08:00
result.Status = "ok"
result.Data = data
2019-02-26 16:57:41 +08:00
if echart == "1" {
dataMap = make(map[string]interface{}, 3)
for _, v := range data {
barCount = append(barCount, v.FileCount)
barSize = append(barSize, v.TotalSize)
category = append(category, v.Date)
}
dataMap["category"] = category
dataMap["barCount"] = barCount
dataMap["barSize"] = barSize
result.Data = dataMap
}
2019-02-13 10:09:41 +08:00
if inner == "1" {
2019-02-12 18:35:26 +08:00
w.Write([]byte(this.util.JsonEncodePretty(data)))
} else {
w.Write([]byte(this.util.JsonEncodePretty(result)))
}
2019-01-17 21:46:29 +08:00
}
func (this *Server) GetStat() []StatDateFileInfo {
2019-01-09 12:05:20 +08:00
var (
2019-02-25 10:38:32 +08:00
min int64
max int64
err error
i int64
2019-02-24 18:23:52 +08:00
rows []StatDateFileInfo
total StatDateFileInfo
2019-01-09 12:05:20 +08:00
)
min = 20190101
max = 20190101
for k := range this.statMap.Get() {
2019-01-09 12:05:20 +08:00
ks := strings.Split(k, "_")
if len(ks) == 2 {
if i, err = strconv.ParseInt(ks[0], 10, 64); err != nil {
continue
}
if i >= max {
max = i
}
if i < min {
min = i
}
}
}
for i := min; i <= max; i++ {
s := fmt.Sprintf("%d", i)
2019-01-18 10:32:25 +08:00
if v, ok := this.statMap.GetValue(s + "_" + CONST_STAT_FILE_TOTAL_SIZE_KEY); ok {
2019-01-09 12:05:20 +08:00
var info StatDateFileInfo
info.Date = s
switch v.(type) {
case int64:
info.TotalSize = v.(int64)
2019-02-24 18:23:52 +08:00
total.TotalSize = total.TotalSize + v.(int64)
2019-01-09 12:05:20 +08:00
}
2019-01-18 10:32:25 +08:00
if v, ok := this.statMap.GetValue(s + "_" + CONST_STAT_FILE_COUNT_KEY); ok {
2019-01-09 12:05:20 +08:00
switch v.(type) {
case int64:
info.FileCount = v.(int64)
2019-02-24 18:23:52 +08:00
total.FileCount = total.FileCount + v.(int64)
2019-01-09 12:05:20 +08:00
}
}
rows = append(rows, info)
2019-01-02 17:46:30 +08:00
}
2017-09-09 16:40:55 +08:00
}
2019-02-24 18:23:52 +08:00
total.Date = "all"
rows = append(rows, total)
2019-01-17 21:46:29 +08:00
return rows
2019-01-09 12:05:20 +08:00
}
func (this *Server) RegisterExit() {
c := make(chan os.Signal)
signal.Notify(c, syscall.SIGHUP, syscall.SIGINT, syscall.SIGTERM, syscall.SIGQUIT)
go func() {
for s := range c {
switch s {
case syscall.SIGHUP, syscall.SIGINT, syscall.SIGTERM, syscall.SIGQUIT:
2019-01-16 10:28:51 +08:00
this.ldb.Close()
2019-01-09 12:05:20 +08:00
log.Info("Exit", s)
os.Exit(1)
}
}
}()
}
2019-01-20 23:45:16 +08:00
func (this *Server) AppendToQueue(fileInfo *FileInfo) {
2019-02-28 11:59:34 +08:00
for (len(this.queueToPeers) + CONST_QUEUE_SIZE/10) > CONST_QUEUE_SIZE {
time.Sleep(time.Second * 1)
}
2019-01-28 17:01:26 +08:00
this.queueToPeers <- *fileInfo
2019-01-21 15:07:42 +08:00
}
func (this *Server) AppendToDownloadQueue(fileInfo *FileInfo) {
2019-02-28 11:59:34 +08:00
for (len(this.queueFromPeers) + CONST_QUEUE_SIZE/10) > CONST_QUEUE_SIZE {
time.Sleep(time.Second * 1)
}
2019-01-28 17:01:26 +08:00
this.queueFromPeers <- *fileInfo
2019-01-21 15:07:42 +08:00
}
func (this *Server) ConsumerDownLoad() {
ConsumerFunc := func() {
for {
fileInfo := <-this.queueFromPeers
if len(fileInfo.Peers) <= 0 {
2019-02-11 17:39:00 +08:00
log.Warn("Peer is null", fileInfo)
2019-01-21 15:07:42 +08:00
continue
}
for _, peer := range fileInfo.Peers {
2019-01-28 17:01:26 +08:00
if strings.Contains(peer, "127.0.0.1") {
2019-02-01 11:31:19 +08:00
log.Warn("sync error with 127.0.0.1", fileInfo)
2019-01-28 17:01:26 +08:00
continue
}
2019-01-21 16:58:16 +08:00
if peer != this.host {
2019-01-21 15:07:42 +08:00
this.DownloadFromPeer(peer, &fileInfo)
break
}
}
}
}
for i := 0; i < 50; i++ {
go ConsumerFunc()
}
2019-01-20 23:45:16 +08:00
}
2019-02-23 10:07:00 +08:00
// ConsumerLog starts a goroutine that drains the file-log queue and
// persists each md5 log entry.
func (this *Server) ConsumerLog() {
	go func() {
		for fileLog := range this.queueFileLog {
			this.saveFileMd5Log(fileLog.FileInfo, fileLog.FileName)
		}
	}()
}
2019-03-01 10:10:55 +08:00
func (this *Server) ConsumerPostToPeer() {
2019-01-17 01:05:27 +08:00
ConsumerFunc := func() {
for {
2019-01-18 10:32:25 +08:00
fileInfo := <-this.queueToPeers
2019-01-18 22:49:09 +08:00
this.postFileToPeer(&fileInfo)
2019-01-17 01:05:27 +08:00
}
}
for i := 0; i < 50; i++ {
go ConsumerFunc()
}
}
2019-02-01 11:31:19 +08:00
// AutoRepair reconciles this node's files with every peer. For each day it
// compares the local file count against the peer's; when they differ (or
// forceRepair is set) it exchanges md5 sets with the peer: md5s the peer is
// missing are pushed via receive_md5s, and files this node is missing are
// queued for replication. A key-level lock ensures only one repair runs at
// a time. Days with no local counter fall back to a full date sync.
func (this *Server) AutoRepair(forceRepair bool) {
	if this.lockMap.IsLock("AutoRepair") {
		log.Warn("Lock AutoRepair")
		return
	}
	this.lockMap.LockKey("AutoRepair")
	defer this.lockMap.UnLockKey("AutoRepair")
	AutoRepairFunc := func(forceRepair bool) {
		var (
			dateStats []StatDateFileInfo
			err       error
			countKey  string
			md5s      string
			localSet  mapset.Set
			remoteSet mapset.Set
			allSet    mapset.Set
			tmpSet    mapset.Set
			fileInfo  *FileInfo
		)
		defer func() {
			if re := recover(); re != nil {
				buffer := debug.Stack()
				log.Error("AutoRepair")
				log.Error(re)
				log.Error(string(buffer))
			}
		}()
		Update := func(peer string, dateStat StatDateFileInfo) { // pull a whole day's data from the remote peer
			req := httplib.Get(fmt.Sprintf("%s%s?date=%s&force=%s", peer, this.getRequestURI("sync"), dateStat.Date, "1"))
			req.SetTimeout(time.Second*5, time.Second*5)
			if _, err = req.String(); err != nil {
				log.Error(err)
			}
			log.Info(fmt.Sprintf("syn file from %s date %s", peer, dateStat.Date))
		}
		for _, peer := range Config().Peers {
			// Fetch the peer's per-day stats (inner=1 returns raw rows).
			req := httplib.Post(fmt.Sprintf("%s%s", peer, this.getRequestURI("stat")))
			req.Param("inner", "1")
			req.SetTimeout(time.Second*5, time.Second*15)
			if err = req.ToJSON(&dateStats); err != nil {
				log.Error(err)
				continue
			}
			for _, dateStat := range dateStats {
				// Skip the aggregate summary row.
				if dateStat.Date == "all" {
					continue
				}
				countKey = dateStat.Date + "_" + CONST_STAT_FILE_COUNT_KEY
				if v, ok := this.statMap.GetValue(countKey); ok {
					switch v.(type) {
					case int64:
						if v.(int64) != dateStat.FileCount || forceRepair { // counts differ: diff the md5 sets
							//TODO
							req := httplib.Post(fmt.Sprintf("%s%s", peer, this.getRequestURI("get_md5s_by_date")))
							req.SetTimeout(time.Second*15, time.Second*60)
							req.Param("date", dateStat.Date)
							if md5s, err = req.String(); err != nil {
								continue
							}
							if localSet, err = this.GetMd5sByDate(dateStat.Date, CONST_FILE_Md5_FILE_NAME); err != nil {
								log.Error(err)
								continue
							}
							remoteSet = this.util.StrToMapSet(md5s, ",")
							allSet = localSet.Union(remoteSet)
							// Push to the peer the md5s it is missing.
							md5s = this.util.MapSetToStr(allSet.Difference(localSet), ",")
							req = httplib.Post(fmt.Sprintf("%s%s", peer, this.getRequestURI("receive_md5s")))
							req.SetTimeout(time.Second*15, time.Second*60)
							req.Param("md5s", md5s)
							req.String()
							// Queue for download the files this node is missing.
							tmpSet = allSet.Difference(remoteSet)
							for v := range tmpSet.Iter() {
								if v != nil {
									if fileInfo, err = this.GetFileInfoFromLevelDB(v.(string)); err != nil {
										log.Error(err)
										continue
									}
									this.AppendToQueue(fileInfo)
								}
							}
							//Update(peer,dateStat)
						}
					}
				} else {
					// No local counter for that day: fall back to a full sync.
					Update(peer, dateStat)
				}
			}
		}
	}
	AutoRepairFunc(forceRepair)
}
2019-02-25 16:24:28 +08:00
func (this *Server) CleanLogLevelDBByDate(date string, filename string) {
2019-02-24 14:09:06 +08:00
defer func() {
if re := recover(); re != nil {
buffer := debug.Stack()
2019-02-25 16:24:28 +08:00
log.Error("CleanLogLevelDBByDate")
2019-02-24 14:09:06 +08:00
log.Error(re)
log.Error(string(buffer))
}
}()
var (
err error
keyPrefix string
keys mapset.Set
)
keys = mapset.NewSet()
keyPrefix = "%s_%s_"
keyPrefix = fmt.Sprintf(keyPrefix, date, filename)
2019-02-25 16:24:28 +08:00
iter := server.logDB.NewIterator(util.BytesPrefix([]byte(keyPrefix)), nil)
2019-02-24 14:09:06 +08:00
for iter.Next() {
keys.Add(string(iter.Value()))
}
iter.Release()
for key := range keys.Iter() {
err = this.RemoveKeyFromLevelDB(key.(string), this.logDB)
if err != nil {
log.Error(err)
}
}
}
2019-02-25 10:22:51 +08:00
func (this *Server) CleanAndBackUp() {
2019-02-24 14:09:06 +08:00
Clean := func() {
2019-01-28 17:01:26 +08:00
var (
2019-02-25 10:22:51 +08:00
filenames []string
2019-02-24 14:09:06 +08:00
yesterday string
2019-01-28 17:01:26 +08:00
)
2019-02-25 10:22:51 +08:00
if this.curDate != this.util.GetToDay() {
filenames = []string{CONST_Md5_QUEUE_FILE_NAME, CONST_Md5_ERROR_FILE_NAME, CONST_REMOME_Md5_FILE_NAME}
yesterday = this.util.GetDayFromTimeStamp(time.Now().AddDate(0, 0, -1).Unix())
for _, filename := range filenames {
2019-02-25 16:24:28 +08:00
this.CleanLogLevelDBByDate(yesterday, filename)
2019-02-25 10:22:51 +08:00
}
this.BackUpMetaDataByDate(yesterday)
2019-02-25 10:38:32 +08:00
this.curDate = this.util.GetToDay()
2019-01-28 17:01:26 +08:00
}
}
go func() {
for {
2019-02-25 10:22:51 +08:00
time.Sleep(time.Hour * 6)
2019-01-28 17:01:26 +08:00
Clean()
}
}()
}
2019-02-24 14:09:06 +08:00
// LoadFileInfoByDate returns the set of *FileInfo records logged under the
// given date and log file name (keys share the prefix "date_filename_").
// Entries that fail to decode are skipped; the error result is always nil
// (kept for interface compatibility).
//
// Fix over the original: the iterator was opened on the package-level
// `server.logDB`; it now uses the receiver's `this.logDB`, matching the
// rest of the receiver's methods.
func (this *Server) LoadFileInfoByDate(date string, filename string) (mapset.Set, error) {
	defer func() {
		if re := recover(); re != nil {
			buffer := debug.Stack()
			log.Error("LoadFileInfoByDate")
			log.Error(re)
			log.Error(string(buffer))
		}
	}()
	var (
		err       error
		keyPrefix string
		fileInfos mapset.Set
	)
	fileInfos = mapset.NewSet()
	keyPrefix = fmt.Sprintf("%s_%s_", date, filename)
	iter := this.logDB.NewIterator(util.BytesPrefix([]byte(keyPrefix)), nil)
	for iter.Next() {
		var fileInfo FileInfo
		if err = json.Unmarshal(iter.Value(), &fileInfo); err != nil {
			continue
		}
		fileInfos.Add(&fileInfo)
	}
	iter.Release()
	return fileInfos, nil
}
2019-03-01 10:10:55 +08:00
func (this *Server) LoadQueueSendToPeer() {
2019-02-24 14:09:06 +08:00
if queue, err := this.LoadFileInfoByDate(this.util.GetToDay(), CONST_Md5_QUEUE_FILE_NAME); err != nil {
log.Error(err)
} else {
for fileInfo := range queue.Iter() {
2019-02-28 11:59:34 +08:00
//this.queueFromPeers <- *fileInfo.(*FileInfo)
this.AppendToDownloadQueue(fileInfo.(*FileInfo))
2019-02-24 14:09:06 +08:00
}
}
}
2019-03-01 10:10:55 +08:00
func (this *Server) CheckClusterStatus() {
2019-01-09 13:13:21 +08:00
check := func() {
defer func() {
if re := recover(); re != nil {
buffer := debug.Stack()
2019-03-01 10:10:55 +08:00
log.Error("CheckClusterStatus")
2019-01-09 13:13:21 +08:00
log.Error(re)
log.Error(string(buffer))
}
}()
var (
2019-02-12 12:55:55 +08:00
status JsonResult
2019-01-09 13:13:21 +08:00
err error
subject string
body string
2019-01-09 14:48:15 +08:00
req *httplib.BeegoHTTPRequest
2019-01-09 13:13:21 +08:00
)
for _, peer := range Config().Peers {
2019-02-12 12:55:55 +08:00
req = httplib.Get(fmt.Sprintf("%s%s", peer, this.getRequestURI("status")))
2019-01-09 13:13:21 +08:00
req.SetTimeout(time.Second*5, time.Second*5)
err = req.ToJSON(&status)
if status.Status != "ok" {
for _, to := range Config().AlramReceivers {
2019-01-09 14:48:15 +08:00
subject = "fastdfs server error"
2019-01-09 13:13:21 +08:00
if err != nil {
2019-01-09 14:48:15 +08:00
body = fmt.Sprintf("%s\nserver:%s\nerror:\n%s", subject, peer, err.Error())
} else {
body = fmt.Sprintf("%s\nserver:%s\n", subject, peer)
}
if err = this.SendToMail(to, subject, body, "text"); err != nil {
log.Error(err)
}
}
if Config().AlarmUrl != "" {
req = httplib.Post(Config().AlarmUrl)
req.SetTimeout(time.Second*10, time.Second*10)
req.Param("message", body)
req.Param("subject", subject)
if _, err = req.String(); err != nil {
log.Error(err)
2019-01-09 13:13:21 +08:00
}
}
}
}
}
go func() {
for {
2019-01-09 14:57:43 +08:00
time.Sleep(time.Minute * 10)
2019-01-09 13:13:21 +08:00
check()
}
}()
}
2019-02-12 11:08:19 +08:00
func (this *Server) RepairFileInfo(w http.ResponseWriter, r *http.Request) {
2019-02-12 12:55:55 +08:00
var (
result JsonResult
)
2019-02-12 11:08:19 +08:00
if !this.IsPeer(r) {
w.Write([]byte(this.GetClusterNotPermitMessage(r)))
return
}
2019-02-26 18:51:39 +08:00
if !Config().EnableMigrate {
w.Write([]byte("please set enable_migrate=true"))
return
}
2019-02-12 12:55:55 +08:00
result.Status = "ok"
2019-02-26 18:51:39 +08:00
result.Message = "repair job start,don't try again,very danger "
go this.RepairFileInfoFromFile()
2019-02-12 12:55:55 +08:00
w.Write([]byte(this.util.JsonEncodePretty(result)))
2019-02-12 11:08:19 +08:00
}
2019-01-22 19:26:05 +08:00
func (this *Server) Reload(w http.ResponseWriter, r *http.Request) {
var (
2019-02-25 10:38:32 +08:00
err error
data []byte
2019-02-12 12:55:55 +08:00
cfg GloablConfig
action string
2019-02-12 11:08:19 +08:00
cfgjson string
2019-02-12 12:55:55 +08:00
result JsonResult
2019-01-22 19:26:05 +08:00
)
2019-02-12 12:55:55 +08:00
result.Status = "fail"
2019-02-12 11:08:19 +08:00
r.ParseForm()
2019-01-22 19:26:05 +08:00
if !this.IsPeer(r) {
2019-02-11 16:02:34 +08:00
w.Write([]byte(this.GetClusterNotPermitMessage(r)))
2019-01-22 19:26:05 +08:00
return
}
2019-02-12 12:55:55 +08:00
cfgjson = r.FormValue("cfg")
action = r.FormValue("action")
_ = cfgjson
if action == "get" {
result.Data = Config()
result.Status = "ok"
w.Write([]byte(this.util.JsonEncodePretty(result)))
2019-01-22 19:26:05 +08:00
return
}
2019-02-12 12:55:55 +08:00
if action == "set" {
if cfgjson == "" {
result.Message = "(error)parameter cfg(json) require"
w.Write([]byte(this.util.JsonEncodePretty(result)))
2019-02-12 11:08:19 +08:00
return
}
2019-02-12 12:55:55 +08:00
if err = json.Unmarshal([]byte(cfgjson), &cfg); err != nil {
2019-02-12 11:08:19 +08:00
log.Error(err)
2019-02-12 12:55:55 +08:00
result.Message = err.Error()
w.Write([]byte(this.util.JsonEncodePretty(result)))
2019-02-12 11:08:19 +08:00
return
}
2019-02-12 12:55:55 +08:00
result.Status = "ok"
cfgjson = this.util.JsonEncodePretty(cfg)
this.util.WriteFile(CONST_CONF_FILE_NAME, cfgjson)
w.Write([]byte(this.util.JsonEncodePretty(result)))
2019-01-22 19:26:05 +08:00
return
}
2019-02-12 12:55:55 +08:00
if action == "reload" {
2019-02-12 11:08:19 +08:00
if data, err = ioutil.ReadFile(CONST_CONF_FILE_NAME); err != nil {
2019-02-12 12:55:55 +08:00
result.Message = err.Error()
w.Write([]byte(this.util.JsonEncodePretty(result)))
2019-02-12 11:08:19 +08:00
return
}
if err = json.Unmarshal(data, &cfg); err != nil {
2019-02-12 12:55:55 +08:00
result.Message = err.Error()
w.Write([]byte(this.util.JsonEncodePretty(result)))
2019-02-12 11:08:19 +08:00
return
}
ParseConfig(CONST_CONF_FILE_NAME)
this.initComponent(true)
2019-02-12 12:55:55 +08:00
result.Status = "ok"
w.Write([]byte(this.util.JsonEncodePretty(result)))
2019-02-12 11:08:19 +08:00
return
}
2019-02-12 12:55:55 +08:00
if action == "" {
2019-02-12 11:08:19 +08:00
w.Write([]byte("(error)action support set(json) get reload"))
}
2019-01-22 19:26:05 +08:00
}
2019-02-26 16:57:41 +08:00
// RemoveEmptyDir asynchronously prunes empty directories under the data and
// store trees (peer-only admin endpoint).
func (this *Server) RemoveEmptyDir(w http.ResponseWriter, r *http.Request) {
	var result JsonResult
	result.Status = "ok"
	if !this.IsPeer(r) {
		result.Message = this.GetClusterNotPermitMessage(r)
		w.Write([]byte(this.util.JsonEncodePretty(result)))
		return
	}
	go this.util.RemoveEmptyDir(DATA_DIR)
	go this.util.RemoveEmptyDir(STORE_DIR)
	result.Message = "clean job start ..,don't try again!!!"
	w.Write([]byte(this.util.JsonEncodePretty(result)))
}
2019-02-25 10:22:51 +08:00
// BackUp asynchronously exports the metadata for the given date
// (default: today) to backup files (peer-only admin endpoint).
func (this *Server) BackUp(w http.ResponseWriter, r *http.Request) {
	var result JsonResult
	result.Status = "ok"
	r.ParseForm()
	date := r.FormValue("date")
	if date == "" {
		date = this.util.GetToDay()
	}
	if !this.IsPeer(r) {
		result.Message = this.GetClusterNotPermitMessage(r)
		w.Write([]byte(this.util.JsonEncodePretty(result)))
		return
	}
	go this.BackUpMetaDataByDate(date)
	result.Message = "back job start..."
	w.Write([]byte(this.util.JsonEncodePretty(result)))
}
2019-02-26 16:57:41 +08:00
// Report serves the static report page, substituting the {group}
// placeholder with the configured group prefix (peer-only).
func (this *Server) Report(w http.ResponseWriter, r *http.Request) {
	var result JsonResult
	result.Status = "ok"
	r.ParseForm()
	if !this.IsPeer(r) {
		w.Write([]byte(this.GetClusterNotPermitMessage(r)))
		return
	}
	reportFileName := STATIC_DIR + "/report.html"
	if !this.util.IsExist(reportFileName) {
		w.Write([]byte(fmt.Sprintf("%s is not found", reportFileName)))
		return
	}
	data, err := this.util.ReadBinFile(reportFileName)
	if err != nil {
		log.Error(err)
		result.Message = err.Error()
		w.Write([]byte(this.util.JsonEncodePretty(result)))
		return
	}
	html := string(data)
	if Config().SupportGroupManage {
		html = strings.Replace(html, "{group}", "/"+Config().Group, 1)
	} else {
		html = strings.Replace(html, "{group}", "", 1)
	}
	w.Write([]byte(html))
}
2019-01-19 10:35:42 +08:00
func (this *Server) Repair(w http.ResponseWriter, r *http.Request) {
var (
2019-02-11 16:02:34 +08:00
force string
2019-02-01 11:31:19 +08:00
forceRepair bool
2019-02-12 12:55:55 +08:00
result JsonResult
2019-01-19 10:35:42 +08:00
)
2019-02-12 12:55:55 +08:00
result.Status = "ok"
2019-01-19 10:35:42 +08:00
r.ParseForm()
2019-01-20 11:05:22 +08:00
force = r.FormValue("force")
if force == "1" {
2019-02-01 11:31:19 +08:00
forceRepair = true
2019-01-19 10:35:42 +08:00
}
if this.IsPeer(r) {
2019-02-01 11:31:19 +08:00
go this.AutoRepair(forceRepair)
2019-02-12 12:55:55 +08:00
result.Message = "repair job start..."
w.Write([]byte(this.util.JsonEncodePretty(result)))
2019-01-19 10:35:42 +08:00
} else {
2019-02-12 12:55:55 +08:00
result.Message = this.GetClusterNotPermitMessage(r)
w.Write([]byte(this.util.JsonEncodePretty(result)))
2019-01-19 10:35:42 +08:00
}
}
2019-01-09 13:13:21 +08:00
func (this *Server) Status(w http.ResponseWriter, r *http.Request) {
var (
2019-02-12 12:55:55 +08:00
status JsonResult
2019-01-09 13:13:21 +08:00
err error
data []byte
2019-01-21 16:58:16 +08:00
sts map[string]interface{}
2019-01-28 17:01:26 +08:00
today string
sumset mapset.Set
ok bool
v interface{}
2019-01-09 13:13:21 +08:00
)
2019-01-21 16:58:16 +08:00
memStat := new(runtime.MemStats)
2019-01-28 17:01:26 +08:00
runtime.ReadMemStats(memStat)
today = this.util.GetToDay()
2019-01-21 16:58:16 +08:00
sts = make(map[string]interface{})
sts["Fs.QueueFromPeers"] = len(this.queueFromPeers)
sts["Fs.QueueToPeers"] = len(this.queueToPeers)
2019-02-28 11:59:34 +08:00
sts["Fs.QueueFileLog"] = len(this.queueFileLog)
2019-01-28 17:01:26 +08:00
for _, k := range []string{CONST_FILE_Md5_FILE_NAME, CONST_Md5_ERROR_FILE_NAME, CONST_Md5_QUEUE_FILE_NAME} {
k2 := fmt.Sprintf("%s_%s", today, k)
if v, ok = this.sumMap.GetValue(k2); ok {
sumset = v.(mapset.Set)
if k == CONST_Md5_QUEUE_FILE_NAME {
sts["Fs.QueueSetSize"] = sumset.Cardinality()
}
if k == CONST_Md5_ERROR_FILE_NAME {
sts["Fs.ErrorSetSize"] = sumset.Cardinality()
}
if k == CONST_FILE_Md5_FILE_NAME {
sts["Fs.FileSetSize"] = sumset.Cardinality()
}
}
}
2019-01-21 16:58:16 +08:00
sts["Fs.AutoRepair"] = Config().AutoRepair
sts["Fs.RefreshInterval"] = Config().RefreshInterval
sts["Fs.Peers"] = Config().Peers
sts["Fs.Local"] = this.host
sts["Fs.FileStats"] = this.GetStat()
sts["Fs.ShowDir"] = Config().ShowDir
sts["Sys.NumGoroutine"] = runtime.NumGoroutine()
sts["Sys.NumCpu"] = runtime.NumCPU()
sts["Sys.Alloc"] = memStat.Alloc
sts["Sys.TotalAlloc"] = memStat.TotalAlloc
sts["Sys.HeapAlloc"] = memStat.HeapAlloc
sts["Sys.Frees"] = memStat.Frees
sts["Sys.HeapObjects"] = memStat.HeapObjects
sts["Sys.NumGC"] = memStat.NumGC
sts["Sys.GCCPUFraction"] = memStat.GCCPUFraction
sts["Sys.GCSys"] = memStat.GCSys
2019-01-28 17:01:26 +08:00
//sts["Sys.MemInfo"] = memStat
2019-01-09 13:13:21 +08:00
status.Status = "ok"
2019-01-21 16:58:16 +08:00
status.Data = sts
w.Write([]byte(this.util.JsonEncodePretty(status)))
return
2019-01-09 13:13:21 +08:00
if data, err = json.Marshal(&status); err != nil {
status.Status = "fail"
status.Message = err.Error()
w.Write(data)
return
}
w.Write(data)
2019-01-09 12:05:20 +08:00
}
// HeartBeat is a deliberately empty endpoint: peers call it only to
// confirm this node is reachable; the empty 200 response is the ack.
func (this *Server) HeartBeat(w http.ResponseWriter, r *http.Request) {

}
2018-05-10 18:19:04 +08:00
func (this *Server) Index(w http.ResponseWriter, r *http.Request) {
2019-02-02 13:09:25 +08:00
var (
2019-02-20 20:14:21 +08:00
uploadUrl string
2019-02-20 11:57:32 +08:00
uploadBigUrl string
2019-02-25 10:38:32 +08:00
uppy string
2019-02-02 13:09:25 +08:00
)
2019-02-11 16:02:34 +08:00
uploadUrl = "/upload"
2019-02-20 20:14:21 +08:00
uploadBigUrl = CONST_BIG_UPLOAD_PATH_SUFFIX
2019-01-03 10:08:01 +08:00
if Config().EnableWebUpload {
2019-02-11 16:02:34 +08:00
if Config().SupportGroupManage {
uploadUrl = fmt.Sprintf("/%s/upload", Config().Group)
2019-02-20 20:14:21 +08:00
uploadBigUrl = fmt.Sprintf("/%s%s", Config().Group, CONST_BIG_UPLOAD_PATH_SUFFIX)
2019-02-02 13:09:25 +08:00
}
2019-02-25 10:38:32 +08:00
uppy = `<html>
2019-02-20 11:57:32 +08:00
<head>
<meta charset="utf-8" />
2019-02-27 17:13:25 +08:00
<title>go-fastdfs</title>
2019-02-20 11:57:32 +08:00
<style>form { bargin } .form-line { display:block;height: 30px;margin:8px; } #stdUpload {background: #fafafa;border-radius: 10px;width: 745px; }</style>
<link href="https://transloadit.edgly.net/releases/uppy/v0.30.0/dist/uppy.min.css" rel="stylesheet"></head>
<body>
2019-02-21 10:46:50 +08:00
<div>标准上传(强列建议使用这种方式)</div>
2019-02-20 11:57:32 +08:00
<div id="stdUpload">
<form action="%s" method="post" enctype="multipart/form-data">
<span class="form-line">文件(file):
<input type="file" id="file" name="file" /></span>
<span class="form-line">场景(scene):
<input type="text" id="scene" name="scene" value="%s" /></span>
<span class="form-line">输出(output):
<input type="text" id="output" name="output" value="json" /></span>
<span class="form-line">自定义路径(path):
<input type="text" id="path" name="path" value="" /></span>
<input type="submit" name="submit" value="upload" /></form>
</div>
2019-02-21 10:46:50 +08:00
<div>断点续传如果文件很大时可以考虑</div>
2019-02-20 11:57:32 +08:00
<div>
<div id="drag-drop-area"></div>
<script src="https://transloadit.edgly.net/releases/uppy/v0.30.0/dist/uppy.min.js"></script>
<script>var uppy = Uppy.Core().use(Uppy.Dashboard, {
inline: true,
target: '#drag-drop-area'
}).use(Uppy.Tus, {
endpoint: '%s'
})
uppy.on('complete', (result) => {
// console.log(result) console.log('Upload complete! Weve uploaded these files:', result.successful)
})
</script>
</div>
</body>
2019-02-25 10:22:51 +08:00
</html>`
2019-02-26 16:57:41 +08:00
uppyFileName := STATIC_DIR + "/uppy.html"
if this.util.IsExist(uppyFileName) {
if data, err := this.util.ReadBinFile(uppyFileName); err != nil {
log.Error(err)
} else {
uppy = string(data)
}
2019-02-27 17:13:25 +08:00
} else {
2019-02-28 11:59:34 +08:00
this.util.WriteFile(uppyFileName, uppy)
2019-02-26 16:57:41 +08:00
}
2019-02-25 10:22:51 +08:00
fmt.Fprintf(w,
fmt.Sprintf(uppy, uploadUrl, Config().DefaultScene, uploadBigUrl))
2019-01-03 10:08:01 +08:00
} else {
w.Write([]byte("web upload deny"))
}
2017-09-09 16:40:55 +08:00
}
2018-05-10 18:19:04 +08:00
// init prepares the process before main runs: it resolves every data
// directory (honouring the GO_FASTDFS_DIR override used by docker),
// creates the folders, writes a default config on first start, installs
// the loggers, parses the config and finally initializes the server.
// NOTE(review): the ordering here is load-bearing (globals must be set
// before NewServer/ParseConfig run); kept byte-identical.
func init() {
	// Optional root override; all other paths hang off it.
	DOCKER_DIR = os.Getenv("GO_FASTDFS_DIR")
	if DOCKER_DIR != "" {
		if !strings.HasSuffix(DOCKER_DIR, "/") {
			DOCKER_DIR = DOCKER_DIR + "/"
		}
	}
	STORE_DIR = DOCKER_DIR + STORE_DIR_NAME
	CONF_DIR = DOCKER_DIR + CONF_DIR_NAME
	DATA_DIR = DOCKER_DIR + DATA_DIR_NAME
	LOG_DIR = DOCKER_DIR + LOG_DIR_NAME
	STATIC_DIR = DOCKER_DIR + STATIC_DIR_NAME
	LARGE_DIR_NAME = "haystack"
	LARGE_DIR = STORE_DIR + "/haystack"
	CONST_LEVELDB_FILE_NAME = DATA_DIR + "/fileserver.db"
	CONST_LOG_LEVELDB_FILE_NAME = DATA_DIR + "/log.db"
	CONST_STAT_FILE_NAME = DATA_DIR + "/stat.json"
	CONST_CONF_FILE_NAME = CONF_DIR + "/cfg.json"
	FOLDERS = []string{DATA_DIR, STORE_DIR, CONF_DIR, STATIC_DIR}
	// The seelog configs are templates; inject the runtime root.
	logAccessConfigStr = strings.Replace(logAccessConfigStr, "{DOCKER_DIR}", DOCKER_DIR, -1)
	logConfigStr = strings.Replace(logConfigStr, "{DOCKER_DIR}", DOCKER_DIR, -1)
	for _, folder := range FOLDERS {
		os.MkdirAll(folder, 0775)
	}
	server = NewServer()
	flag.Parse()
	// Random single-digit peer id, used only if the config lacks one.
	peerId := fmt.Sprintf("%d", server.util.RandInt(0, 9))
	if !server.util.FileExists(CONST_CONF_FILE_NAME) {
		// First start: write a default config pointing at our public IP.
		peer := "http://" + server.util.GetPulicIP() + ":8080"
		cfg := fmt.Sprintf(cfgJson, peerId, peer, peer)
		server.util.WriteFile(CONST_CONF_FILE_NAME, cfg)
	}
	// Main logger is mandatory; access logger is best-effort.
	if logger, err := log.LoggerFromConfigAsBytes([]byte(logConfigStr)); err != nil {
		panic(err)
	} else {
		log.ReplaceLogger(logger)
	}
	if _logacc, err := log.LoggerFromConfigAsBytes([]byte(logAccessConfigStr)); err == nil {
		logacc = _logacc
		log.Info("succes init log access")
	} else {
		log.Error(err.Error())
	}
	ParseConfig(CONST_CONF_FILE_NAME)
	if Config().QueueSize == 0 {
		Config().QueueSize = CONST_QUEUE_SIZE
	}
	if Config().PeerId == "" {
		Config().PeerId = peerId
	}
	// Static file serving for downloads under the group prefix.
	staticHandler = http.StripPrefix("/"+Config().Group+"/", http.FileServer(http.Dir(STORE_DIR)))
	server.initComponent(false)
}
2019-02-13 10:09:41 +08:00
// test is developer scratch code: it exercises the key-lock map and
// low-level file Seek/WriteAt behaviour. Nothing actually runs — both
// closures are discarded via `_ =` and the call sites are commented out.
func (this *Server) test() {
	// Concurrency smoke test for lockMap (IsLock/LockKey/UnLockKey).
	testLock := func() {
		tt := func(i int) {
			if server.lockMap.IsLock("xx") {
				return
			}
			server.lockMap.LockKey("xx")
			defer server.lockMap.UnLockKey("xx")
			//time.Sleep(time.Nanosecond*1)
			fmt.Println("xx", i)
		}
		for i := 0; i < 10000; i++ {
			go tt(i)
		}
		time.Sleep(time.Second * 3)
		go tt(999999)
		go tt(999999)
		go tt(999999)
	}
	_ = testLock
	// Scratch test of sparse WriteAt + Seek-to-end append on a file.
	// NOTE(review): f is never closed; harmless here since the code is
	// never invoked.
	testFile := func() {
		var (
			err error
			f   *os.File
		)
		f, err = os.OpenFile("tt", os.O_CREATE|os.O_RDWR, 0777)
		if err != nil {
			fmt.Println(err)
		}
		f.WriteAt([]byte("1"), 100)
		f.Seek(0, 2)
		f.Write([]byte("2"))
		//fmt.Println(f.Seek(0, 2))
		//fmt.Println(f.Seek(3, 2))
		//fmt.Println(f.Seek(3, 0))
		//fmt.Println(f.Seek(3, 1))
		//fmt.Println(f.Seek(3, 0))
		//f.Write([]byte("1"))
	}
	_ = testFile
	//testFile()
}
2019-02-17 12:06:19 +08:00
// initTus wires up the tus resumable-upload protocol for big files.
// In-flight chunks live under STORE_DIR/_big/<peerId>; the tusd log is
// rotated when it exceeds 500MB; completed uploads are md5-deduped,
// moved into the regular store tree, recorded in LevelDB and replicated
// to peers. The handler is mounted on the (optionally group-prefixed)
// big-upload path.
func (this *Server) initTus() {
	var (
		err     error
		fileLog *os.File
		bigDir  string
	)
	BIG_DIR := STORE_DIR + "/_big/" + Config().PeerId
	os.MkdirAll(BIG_DIR, 0775)
	os.MkdirAll(LOG_DIR, 0775)
	store := filestore.FileStore{
		Path: BIG_DIR,
	}
	if fileLog, err = os.OpenFile(LOG_DIR+"/tusd.log", os.O_CREATE|os.O_RDWR, 0666); err != nil {
		log.Error(err)
		panic("initTus")
	}
	// Minimal log rotation: copy aside then truncate in place, checked
	// every 30 seconds.
	go func() {
		for {
			if fi, err := fileLog.Stat(); err != nil {
				log.Error(err)
			} else {
				if fi.Size() > 1024*1024*500 { //500M
					this.util.CopyFile(LOG_DIR+"/tusd.log", LOG_DIR+"/tusd.log.2")
					fileLog.Seek(0, 0)
					fileLog.Truncate(0)
					fileLog.Seek(0, 2)
				}
			}
			time.Sleep(time.Second * 30)
		}
	}()
	l := slog.New(fileLog, "[tusd] ", slog.LstdFlags)
	bigDir = CONST_BIG_UPLOAD_PATH_SUFFIX
	if Config().SupportGroupManage {
		bigDir = fmt.Sprintf("/%s%s", Config().Group, CONST_BIG_UPLOAD_PATH_SUFFIX)
	}
	composer := tusd.NewStoreComposer()
	// support raw tus upload and download
	// Custom reader so tus GETs can serve files already promoted into
	// the store (plain files, or slices of a haystack volume encoded
	// as "volumePath,offset,length").
	store.GetReaderExt = func(id string) (io.Reader, error) {
		var (
			offset int64
			err    error
			length int
			buffer []byte
			fi     *FileInfo
		)
		if fi, err = this.GetFileInfoFromLevelDB(id); err != nil {
			log.Error(err)
			return nil, err
		} else {
			fp := DOCKER_DIR + fi.Path + "/" + fi.ReName
			if this.util.FileExists(fp) {
				log.Info(fmt.Sprintf("download:%s", fp))
				return os.Open(fp)
			}
			ps := strings.Split(fp, ",")
			if len(ps) > 2 && this.util.FileExists(ps[0]) {
				if length, err = strconv.Atoi(ps[2]); err != nil {
					return nil, err
				}
				if offset, err = strconv.ParseInt(ps[1], 10, 64); err != nil {
					return nil, err
				}
				if buffer, err = this.util.ReadFileByOffSet(ps[0], offset, length); err != nil {
					return nil, err
				}
				// Leading '1' byte marks a synced/valid record.
				if buffer[0] == '1' {
					bufferReader := bytes.NewBuffer(buffer[1:])
					return bufferReader, nil
				} else {
					msg := "data no sync"
					log.Error(msg)
					return nil, errors.New(msg)
				}
			}
			return nil, errors.New(fmt.Sprintf("%s not found", fp))
		}
	}
	store.UseIn(composer)
	handler, err := tusd.NewHandler(tusd.Config{
		Logger:                l,
		BasePath:              bigDir,
		StoreComposer:         composer,
		NotifyCompleteUploads: true,
	})
	// Consume completion events: dedupe by md5, move the upload into
	// the dated store layout, persist FileInfo, and fan out to peers.
	notify := func(handler *tusd.Handler) {
		for {
			select {
			case info := <-handler.CompleteUploads:
				log.Info("CompleteUploads", info)
				name := ""
				if v, ok := info.MetaData["filename"]; ok {
					name = v
				}
				var err error
				md5sum := ""
				oldFullPath := BIG_DIR + "/" + info.ID + ".bin"
				infoFullPath := BIG_DIR + "/" + info.ID + ".info"
				if md5sum, err = this.util.GetFileSumByName(oldFullPath, Config().FileSumArithmetic); err != nil {
					log.Error(err)
					continue
				}
				ext := path.Ext(name)
				filename := md5sum + ext
				timeStamp := time.Now().Unix()
				fpath := time.Now().Format("/20060102/15/04/")
				newFullPath := STORE_DIR + "/" + Config().DefaultScene + fpath + Config().PeerId + "/" + filename
				if fi, err := this.GetFileInfoFromLevelDB(md5sum); err != nil {
					log.Error(err)
				} else {
					// Duplicate content: map the tus id to the existing
					// FileInfo and discard the freshly uploaded copy.
					if fi.Md5 != "" {
						if _, err := this.SaveFileInfoToLevelDB(info.ID, fi, this.ldb); err != nil {
							log.Error(err)
						}
						log.Info(fmt.Sprintf("file is found md5:%s", fi.Md5))
						log.Info("remove file:", oldFullPath)
						log.Info("remove file:", infoFullPath)
						os.Remove(oldFullPath)
						os.Remove(infoFullPath)
						continue
					}
				}
				fpath = STORE_DIR_NAME + "/" + Config().DefaultScene + fpath + Config().PeerId
				os.MkdirAll(DOCKER_DIR+fpath, 0775)
				fileInfo := &FileInfo{
					Name:      name,
					Path:      fpath,
					ReName:    filename,
					Size:      info.Size,
					TimeStamp: timeStamp,
					Md5:       md5sum,
					Peers:     []string{this.host},
					OffSet:    -1,
				}
				if err = os.Rename(oldFullPath, newFullPath); err != nil {
					log.Error(err)
					continue
				}
				log.Info(fileInfo)
				os.Remove(infoFullPath)
				if _, err = this.SaveFileInfoToLevelDB(info.ID, fileInfo, this.ldb); err != nil { //assosiate file id
					log.Error(err)
				}
				this.SaveFileMd5Log(fileInfo, CONST_FILE_Md5_FILE_NAME)
				go this.postFileToPeer(fileInfo)
			}
		}
	}
	go notify(handler)
	if err != nil {
		log.Error(err)
	}
	http.Handle(bigDir, http.StripPrefix(bigDir, handler))
}
2019-02-15 17:48:55 +08:00
func (this *Server) FormatStatInfo() {
2019-01-02 17:46:30 +08:00
var (
data []byte
2019-02-15 17:48:55 +08:00
err error
2019-01-02 17:46:30 +08:00
count int64
2019-02-15 17:48:55 +08:00
stat map[string]interface{}
)
if this.util.FileExists(CONST_STAT_FILE_NAME) {
if data, err = this.util.ReadBinFile(CONST_STAT_FILE_NAME); err != nil {
log.Error(err)
} else {
if err = json.Unmarshal(data, &stat); err != nil {
log.Error(err)
} else {
for k, v := range stat {
switch v.(type) {
case float64:
vv := strings.Split(fmt.Sprintf("%f", v), ".")[0]
if count, err = strconv.ParseInt(vv, 10, 64); err != nil {
log.Error(err)
} else {
this.statMap.Put(k, count)
}
default:
this.statMap.Put(k, v)
}
}
}
}
} else {
2019-02-23 23:01:40 +08:00
this.RepairStatByDate(this.util.GetToDay())
2019-02-15 17:48:55 +08:00
}
}
func (this *Server) initComponent(isReload bool) {
var (
ip string
2019-01-02 17:46:30 +08:00
)
2019-01-18 10:32:25 +08:00
ip = this.util.GetPulicIP()
2019-02-01 11:31:19 +08:00
if Config().Host == "" {
2019-01-21 16:58:16 +08:00
if len(strings.Split(Config().Addr, ":")) == 2 {
server.host = fmt.Sprintf("http://%s:%s", ip, strings.Split(Config().Addr, ":")[1])
2019-02-11 16:02:34 +08:00
Config().Host = server.host
2019-01-21 15:07:42 +08:00
}
2019-02-01 11:31:19 +08:00
} else {
2019-02-11 16:02:34 +08:00
server.host = Config().Host
2019-01-21 15:07:42 +08:00
}
2018-12-30 17:17:40 +08:00
ex, _ := regexp.Compile("\\d+\\.\\d+\\.\\d+\\.\\d+")
2019-01-01 14:41:57 +08:00
var peers []string
for _, peer := range Config().Peers {
2019-01-19 10:35:42 +08:00
if this.util.Contains(ip, ex.FindAllString(peer, -1)) ||
2019-01-20 11:05:22 +08:00
this.util.Contains("127.0.0.1", ex.FindAllString(peer, -1)) {
2019-01-01 14:41:57 +08:00
continue
}
if strings.HasPrefix(peer, "http") {
peers = append(peers, peer)
} else {
peers = append(peers, "http://"+peer)
2018-12-30 17:17:40 +08:00
}
}
2019-01-01 14:41:57 +08:00
Config().Peers = peers
2019-02-01 11:31:19 +08:00
if !isReload {
2019-02-15 17:48:55 +08:00
this.FormatStatInfo()
2019-02-17 12:06:19 +08:00
this.initTus()
2019-01-16 10:28:51 +08:00
}
//Timer
2019-02-17 12:06:19 +08:00
// int tus
2018-12-30 17:17:40 +08:00
}
2018-12-30 18:18:42 +08:00
// HttpHandler is the root http.Handler: it wraps the default mux to add
// access logging and panic-to-500 recovery around every request.
type HttpHandler struct {
}
2018-05-10 18:19:04 +08:00
2018-12-30 18:18:42 +08:00
func (HttpHandler) ServeHTTP(res http.ResponseWriter, req *http.Request) {
status_code := "200"
defer func(t time.Time) {
logStr := fmt.Sprintf("[Access] %s | %v | %s | %s | %s | %s |%s",
time.Now().Format("2006/01/02 - 15:04:05"),
res.Header(),
time.Since(t).String(),
2019-01-18 10:32:25 +08:00
server.util.GetClientIp(req),
2018-12-30 18:18:42 +08:00
req.Method,
status_code,
req.RequestURI,
)
logacc.Info(logStr)
}(time.Now())
defer func() {
if err := recover(); err != nil {
status_code = "500"
res.WriteHeader(500)
print(err)
buff := debug.Stack()
log.Error(err)
log.Error(string(buff))
}
}()
http.DefaultServeMux.ServeHTTP(res, req)
}
2019-01-16 10:28:51 +08:00
func (this *Server) Main() {
2018-12-30 17:17:40 +08:00
go func() {
for {
2019-01-20 11:05:22 +08:00
this.CheckFileAndSendToPeer(this.util.GetToDay(), CONST_Md5_ERROR_FILE_NAME, false)
2019-01-20 23:45:16 +08:00
//fmt.Println("CheckFileAndSendToPeer")
2019-01-02 17:51:08 +08:00
time.Sleep(time.Second * time.Duration(Config().RefreshInterval))
2019-01-21 15:07:42 +08:00
//this.util.RemoveEmptyDir(STORE_DIR)
2018-12-30 17:17:40 +08:00
}
}()
2019-02-25 10:22:51 +08:00
go this.CleanAndBackUp()
2019-03-01 10:10:55 +08:00
go this.CheckClusterStatus()
go this.LoadQueueSendToPeer()
go this.ConsumerPostToPeer()
2019-02-23 10:07:00 +08:00
go this.ConsumerLog()
2019-01-21 15:07:42 +08:00
go this.ConsumerDownLoad()
2019-02-27 17:13:25 +08:00
if Config().EnableMigrate {
go this.RepairFileInfoFromFile()
}
2019-01-19 20:44:54 +08:00
if Config().AutoRepair {
go func() {
for {
2019-01-20 11:05:22 +08:00
time.Sleep(time.Minute * 3)
2019-01-19 20:44:54 +08:00
this.AutoRepair(false)
2019-01-20 11:05:22 +08:00
time.Sleep(time.Minute * 60)
2019-01-19 20:44:54 +08:00
}
}()
}
2019-02-26 18:51:39 +08:00
groupRoute := ""
2019-02-11 16:02:34 +08:00
if Config().SupportGroupManage {
2019-02-26 18:51:39 +08:00
groupRoute = "/" + Config().Group
2019-02-02 13:09:25 +08:00
}
2019-02-26 18:51:39 +08:00
if groupRoute == "" {
http.HandleFunc(fmt.Sprintf("%s", "/"), this.Index)
2019-02-26 16:57:41 +08:00
} else {
2019-02-26 18:51:39 +08:00
http.HandleFunc(fmt.Sprintf("%s", groupRoute), this.Index)
2019-02-26 16:57:41 +08:00
}
http.HandleFunc(fmt.Sprintf("%s/check_file_exist", groupRoute), this.CheckFileExist)
http.HandleFunc(fmt.Sprintf("%s/upload", groupRoute), this.Upload)
http.HandleFunc(fmt.Sprintf("%s/delete", groupRoute), this.RemoveFile)
http.HandleFunc(fmt.Sprintf("%s/sync", groupRoute), this.Sync)
http.HandleFunc(fmt.Sprintf("%s/stat", groupRoute), this.Stat)
http.HandleFunc(fmt.Sprintf("%s/repair_stat", groupRoute), this.RepairStatWeb)
http.HandleFunc(fmt.Sprintf("%s/status", groupRoute), this.Status)
http.HandleFunc(fmt.Sprintf("%s/repair", groupRoute), this.Repair)
http.HandleFunc(fmt.Sprintf("%s/report", groupRoute), this.Report)
http.HandleFunc(fmt.Sprintf("%s/backup", groupRoute), this.BackUp)
http.HandleFunc(fmt.Sprintf("%s/remove_empty_dir", groupRoute), this.RemoveEmptyDir)
http.HandleFunc(fmt.Sprintf("%s/repair_fileinfo", groupRoute), this.RepairFileInfo)
http.HandleFunc(fmt.Sprintf("%s/reload", groupRoute), this.Reload)
http.HandleFunc(fmt.Sprintf("%s/syncfile_info", groupRoute), this.SyncFileInfo)
http.HandleFunc(fmt.Sprintf("%s/get_md5s_by_date", groupRoute), this.GetMd5sForWeb)
http.HandleFunc(fmt.Sprintf("%s/receive_md5s", groupRoute), this.ReceiveMd5s)
2019-01-16 10:28:51 +08:00
http.HandleFunc("/"+Config().Group+"/", this.Download)
2019-01-01 14:41:57 +08:00
fmt.Println("Listen on " + Config().Addr)
2019-01-04 10:22:03 +08:00
err := http.ListenAndServe(Config().Addr, new(HttpHandler))
log.Error(err)
fmt.Println(err)
2019-01-16 10:28:51 +08:00
}
// main delegates to the package-level server instance built in init().
func main() {
	server.Main()
}