go-fastdfs/fileserver.go

2224 lines
45 KiB
Go
Raw Normal View History

2017-09-09 16:40:55 +08:00
package main
import (
2018-05-10 13:31:34 +08:00
"crypto/md5"
"crypto/rand"
"encoding/base64"
2018-12-30 17:17:40 +08:00
"encoding/json"
"errors"
2018-05-10 18:19:04 +08:00
"flag"
2017-09-09 16:40:55 +08:00
"fmt"
"io"
2018-12-30 17:17:40 +08:00
"io/ioutil"
"mime/multipart"
"net"
2018-05-10 13:31:34 +08:00
"net/http"
2019-01-17 01:05:27 +08:00
_ "net/http/pprof"
2019-01-09 12:05:20 +08:00
"net/smtp"
2019-01-02 20:37:50 +08:00
"net/url"
2019-01-09 12:05:20 +08:00
"os/signal"
2019-01-01 23:31:14 +08:00
"path"
2018-12-31 17:46:24 +08:00
"path/filepath"
2018-12-30 17:17:40 +08:00
"reflect"
2018-12-30 18:18:42 +08:00
"runtime/debug"
2019-01-02 17:46:30 +08:00
"strconv"
2019-01-09 12:05:20 +08:00
"syscall"
2019-01-16 15:30:53 +08:00
2019-01-02 17:46:30 +08:00
// "strconv"
"sync"
2018-12-30 17:17:40 +08:00
2017-09-09 16:40:55 +08:00
"os"
2018-12-30 17:17:40 +08:00
"regexp"
2018-05-10 13:31:34 +08:00
"strings"
2018-12-30 17:17:40 +08:00
"sync/atomic"
2018-05-10 13:31:34 +08:00
"time"
2018-12-30 17:17:40 +08:00
"unsafe"
"github.com/astaxie/beego/httplib"
"github.com/syndtr/goleveldb/leveldb"
2018-05-10 13:31:34 +08:00
log "github.com/sjqzhang/seelog"
)
2018-05-10 18:19:04 +08:00
// Package-level singletons and shared mutable state.
var staticHandler http.Handler // serves files under STORE_DIR for download requests
var util = &Common{}           // shared utility-helper instance
var server = &Server{}         // the single Server instance

// statMap holds per-date and global file-count / total-size statistics.
var statMap = &CommonMap{m: make(map[string]interface{})}

// queueToPeers buffers FileInfo records awaiting replication to peers.
var queueToPeers = make(chan FileInfo, CONST_QUEUE_SIZE)

var logacc log.LoggerInterface // access logger (separate from the main log)

// FOLDERS are created at startup if missing.
var FOLDERS = []string{DATA_DIR, STORE_DIR, CONF_DIR}

var CONST_QUEUE_SIZE = 10000 // capacity of queueToPeers

var (
	FileName string         // path of the loaded config file (set by ParseConfig)
	ptr      unsafe.Pointer // atomically swapped *GloablConfig (read via Config())
)
2018-05-10 13:31:34 +08:00
const (
	// Directory layout (relative to the working directory).
	STORE_DIR = "files" // uploaded file storage root
	CONF_DIR  = "conf"  // configuration directory
	DATA_DIR  = "data"  // metadata: levelDB, md5 logs, stats

	CONST_LEVELDB_FILE_NAME = DATA_DIR + "/fileserver.db" // levelDB path
	CONST_STAT_FILE_NAME    = DATA_DIR + "/stat.json"     // persisted statistics snapshot
	CONST_CONF_FILE_NAME    = CONF_DIR + "/cfg.json"      // default config file

	// Keys used inside statMap for the global counters.
	CONST_STAT_FILE_COUNT_KEY      = "fileCount"
	CONST_STAT_FILE_TOTAL_SIZE_KEY = "totalSize"

	// Per-date md5 log file names (under DATA_DIR/yyyymmdd/).
	CONST_Md5_ERROR_FILE_NAME = "errors.md5" // files that failed to replicate
	CONST_FILE_Md5_FILE_NAME  = "files.md5"  // all files received/uploaded

	// cfgJson is the built-in configuration template written on first run;
	// the Chinese keys are inline documentation and are ignored by the
	// GloablConfig unmarshal. The "%s" is filled with the local peer URL.
	cfgJson = `{
	"绑定端号": "端口",
	"addr": ":8080",
	"集群": "集群列表",
	"peers": ["%s"],
	"组号": "组号",
	"group": "group1",
	"refresh_interval": 120,
	"是否自动重命名": "真假",
	"rename_file": false,
	"是否支持WEB上专": "真假",
	"enable_web_upload": true,
	"是否支持非日期路径": "真假",
	"enable_custom_path": true,
	"下载域名": "",
	"download_domain": "",
	"场景":"场景列表",
	"scenes":[],
	"默认场景":"",
	"default_scene":"default",
	"是否显示目录": "真假",
	"show_dir": true,
	"邮件配置":"",
	"mail":{
	"user":"abc@163.com",
	"password":"abc",
	"host":"smtp.163.com:25"
	},
	"告警接收邮件列表":"",
	"alram_receivers":[],
	"告警接收URL":"",
	"alarm_url":"",
	"下载是否需带token":"真假",
	"download_use_token":false,
	"下载token过期时间":"",
	"download_token_expire":600,
	"是否自动修复":"可能存在性问题(每小时一次)",
	"auto_repair":true
	}
`
	// logConfigStr configures the main rolling-file logger (seelog).
	logConfigStr = `
<seelog type="asynctimer" asyncinterval="1000" minlevel="trace" maxlevel="error">
	<outputs formatid="common">
		<buffered formatid="common" size="1048576" flushperiod="1000">
			<rollingfile type="size" filename="./log/fileserver.log" maxsize="104857600" maxrolls="10"/>
		</buffered>
	</outputs>
	<formats>
		<format id="common" format="%Date %Time [%LEV] [%File:%Line] [%Func] %Msg%n" />
	</formats>
</seelog>
`
	// logAccessConfigStr configures the separate HTTP access logger.
	logAccessConfigStr = `
<seelog type="asynctimer" asyncinterval="1000" minlevel="trace" maxlevel="error">
	<outputs formatid="common">
		<buffered formatid="common" size="1048576" flushperiod="1000">
			<rollingfile type="size" filename="./log/access.log" maxsize="104857600" maxrolls="10"/>
		</buffered>
	</outputs>
	<formats>
		<format id="common" format="%Date %Time [%LEV] [%File:%Line] [%Func] %Msg%n" />
	</formats>
</seelog>
`
)
2018-05-10 13:31:34 +08:00
// Common is a bag of stateless utility helpers shared across the server.
type Common struct {
}

// Server is the file server itself; it owns the levelDB handle used to
// index file metadata by content-md5 and by path-md5.
type Server struct {
	ldb  *leveldb.DB
	util *Common
}

// FileInfo is the metadata record stored in levelDB for every file.
type FileInfo struct {
	Name      string   // original upload name
	ReName    string   // server-assigned name when rename_file is enabled
	Path      string   // storage directory (under STORE_DIR)
	Md5       string   // content md5 — primary key in levelDB
	Size      int64    // file size in bytes
	Peers     []string // peers known to hold a replica
	Scene     string   // logical upload scene/namespace
	TimeStamp int64    // upload time, unix seconds
}
2019-01-09 13:13:21 +08:00
// Status is a generic JSON message/status response envelope.
type Status struct {
	Message string `json:"message"`
	Status  string `json:"status"`
}

// FileResult is the JSON body returned for uploads when output=json.
type FileResult struct {
	Url    string `json:"url"`
	Md5    string `json:"md5"`
	Path   string `json:"path"`
	Domain string `json:"domain"`
	Scene  string `json:"scene"`
	//Just for Compatibility
	Scenes  string `json:"scenes"`
	Retmsg  string `json:"retmsg"`
	Retcode int    `json:"retcode"`
	Src     string `json:"src"`
}

// Mail holds the SMTP credentials used for alarm notifications.
type Mail struct {
	User     string `json:"user"`
	Password string `json:"password"`
	Host     string `json:"host"`
}

// StatDateFileInfo is one per-date row of the statistics report.
type StatDateFileInfo struct {
	Date      string `json:"date"`
	TotalSize int64  `json:"totalSize"`
	FileCount int64  `json:"fileCount"`
}
2018-12-30 17:17:40 +08:00
// GloablConfig (sic — misspelling kept: the name is referenced throughout
// the file) mirrors cfg.json. Instances are published atomically via
// ParseConfig and read through Config().
type GloablConfig struct {
	Addr                string   `json:"addr"`
	Peers               []string `json:"peers"`
	Group               string   `json:"group"`
	RenameFile          bool     `json:"rename_file"`
	ShowDir             bool     `json:"show_dir"`
	RefreshInterval     int      `json:"refresh_interval"`
	EnableWebUpload     bool     `json:"enable_web_upload"`
	DownloadDomain      string   `json:"download_domain"`
	EnableCustomPath    bool     `json:"enable_custom_path"`
	Scenes              []string `json:"scenes"`
	AlramReceivers      []string `json:"alram_receivers"`
	DefaultScene        string   `json:"default_scene"`
	Mail                Mail     `json:"mail"`
	AlarmUrl            string   `json:"alarm_url"`
	DownloadUseToken    bool     `json:"download_use_token"`
	DownloadTokenExpire int      `json:"download_token_expire"`
	QueueSize           int      `json:"queue_size"`
	AutoRepair          bool     `json:"auto_repair"`
}
2019-01-02 17:46:30 +08:00
// CommonMap is a mutex-guarded map used for the server's statistics
// counters. All methods are safe for concurrent use.
type CommonMap struct {
	sync.Mutex
	m map[string]interface{}
}

// GetValue returns the value stored under k and whether it was present.
func (s *CommonMap) GetValue(k string) (interface{}, bool) {
	s.Lock()
	defer s.Unlock()
	v, ok := s.m[k]
	return v, ok
}

// Put stores v under k, replacing any previous value.
func (s *CommonMap) Put(k string, v interface{}) {
	s.Lock()
	defer s.Unlock()
	s.m[k] = v
}

// AddCount adds count to the int counter stored under key, initializing
// it to count when the key is absent.
func (s *CommonMap) AddCount(key string, count int) {
	s.Lock()
	defer s.Unlock()
	if _v, ok := s.m[key]; ok {
		v := _v.(int)
		s.m[key] = v + count
	} else {
		// BUG FIX: a missing key was previously initialized to 1
		// regardless of count, inconsistent with AddCountInt64.
		s.m[key] = count
	}
}

// AddCountInt64 adds count to the int64 counter stored under key,
// initializing it to count when the key is absent.
func (s *CommonMap) AddCountInt64(key string, count int64) {
	s.Lock()
	defer s.Unlock()
	if _v, ok := s.m[key]; ok {
		v := _v.(int64)
		s.m[key] = v + count
	} else {
		s.m[key] = count
	}
}

// Add increments the int counter stored under key, starting at 1.
func (s *CommonMap) Add(key string) {
	s.Lock()
	defer s.Unlock()
	if _v, ok := s.m[key]; ok {
		v := _v.(int)
		s.m[key] = v + 1
	} else {
		s.m[key] = 1
	}
}

// Zero resets every entry to the untyped int 0; the concrete counter
// type is re-established by the next AddCount/AddCountInt64 call.
func (s *CommonMap) Zero() {
	s.Lock()
	defer s.Unlock()
	for k := range s.m {
		s.m[k] = 0
	}
}

// Get returns a shallow copy of the map, taken under the lock, so
// callers can iterate without racing concurrent writers.
func (s *CommonMap) Get() map[string]interface{} {
	s.Lock()
	defer s.Unlock()
	m := make(map[string]interface{}, len(s.m))
	for k, v := range s.m {
		m[k] = v
	}
	return m
}
2019-01-01 14:41:57 +08:00
func Config() *GloablConfig {
return (*GloablConfig)(atomic.LoadPointer(&ptr))
2018-12-30 17:17:40 +08:00
}
func ParseConfig(filePath string) {
var (
data []byte
)
if filePath == "" {
data = []byte(strings.TrimSpace(cfgJson))
} else {
file, err := os.Open(filePath)
if err != nil {
panic(fmt.Sprintln("open file path:", filePath, "error:", err))
}
defer file.Close()
FileName = filePath
data, err = ioutil.ReadAll(file)
if err != nil {
panic(fmt.Sprintln("file path:", filePath, " read all error:", err))
}
}
var c GloablConfig
if err := json.Unmarshal(data, &c); err != nil {
panic(fmt.Sprintln("file path:", filePath, "json unmarshal error:", err))
}
log.Info(c)
atomic.StorePointer(&ptr, unsafe.Pointer(&c))
log.Info("config parse success")
2018-05-10 18:19:04 +08:00
}
2018-05-10 13:31:34 +08:00
func (this *Common) GetUUID() string {
b := make([]byte, 48)
if _, err := io.ReadFull(rand.Reader, b); err != nil {
return ""
}
2018-05-10 13:31:34 +08:00
id := this.MD5(base64.URLEncoding.EncodeToString(b))
return fmt.Sprintf("%s-%s-%s-%s-%s", id[0:8], id[8:12], id[12:16], id[16:20], id[20:])
}
2019-01-09 12:05:20 +08:00
// GetToDay returns the current local date formatted as yyyymmdd.
func (this *Common) GetToDay() string {
	const layout = "20060102"
	return time.Now().Format(layout)
}
2018-12-30 17:17:40 +08:00
// GetPulicIP (sic) returns the local interface IP that would be used to
// reach the public internet. Dialing UDP sends no packets; it merely
// selects a route, whose local address we then inspect.
func (this *Common) GetPulicIP() string {
	conn, err := net.Dial("udp", "8.8.8.8:80")
	if err != nil {
		// BUG FIX: the original deferred Close on a nil conn and
		// panicked on hosts with no route; fall back to loopback.
		return "127.0.0.1"
	}
	defer conn.Close()
	localAddr := conn.LocalAddr().String()
	idx := strings.LastIndex(localAddr, ":")
	return localAddr[0:idx]
}
2018-05-10 13:31:34 +08:00
// MD5 returns the lowercase hex md5 digest of str.
func (this *Common) MD5(str string) string {
	h := md5.New()
	io.WriteString(h, str)
	return fmt.Sprintf("%x", h.Sum(nil))
}
2018-12-30 17:17:40 +08:00
// GetFileMd5 rewinds file to the start and returns the hex md5 of its
// full contents. Seek/copy errors are ignored, matching callers that
// treat a wrong digest as a mismatch.
func (this *Common) GetFileMd5(file *os.File) string {
	file.Seek(0, 0)
	h := md5.New()
	io.Copy(h, file)
	return fmt.Sprintf("%x", h.Sum(nil))
}
// Contains reports whether obj occurs in arrayobj, which may be a
// slice or array (element equality) or a map (key presence). Any other
// kind yields false.
func (this *Common) Contains(obj interface{}, arrayobj interface{}) bool {
	v := reflect.ValueOf(arrayobj)
	switch v.Kind() {
	case reflect.Slice, reflect.Array:
		n := v.Len()
		for i := 0; i < n; i++ {
			if v.Index(i).Interface() == obj {
				return true
			}
		}
	case reflect.Map:
		return v.MapIndex(reflect.ValueOf(obj)).IsValid()
	}
	return false
}
2018-05-10 18:19:04 +08:00
// FileExists reports whether fileName can be stat'ed without error.
func (this *Common) FileExists(fileName string) bool {
	if _, err := os.Stat(fileName); err != nil {
		return false
	}
	return true
}
2019-01-01 14:41:57 +08:00
// WriteFile writes the string data to path (mode 0666) and reports success.
func (this *Common) WriteFile(path string, data string) bool {
	return ioutil.WriteFile(path, []byte(data), 0666) == nil
}

// WriteBinFile writes raw bytes to path (mode 0666) and reports success.
func (this *Common) WriteBinFile(path string, data []byte) bool {
	return ioutil.WriteFile(path, data, 0666) == nil
}
2019-01-02 17:46:30 +08:00
// IsExist reports whether filename exists on disk.
func (this *Common) IsExist(filename string) bool {
	if _, err := os.Stat(filename); err != nil {
		return os.IsExist(err)
	}
	return true
}
2019-01-16 15:30:53 +08:00
// Match returns every match of the regexp pattern matcher inside
// content; an invalid pattern yields a nil slice.
func (this *Common) Match(matcher string, content string) []string {
	reg, err := regexp.Compile(matcher)
	if err != nil {
		return nil
	}
	return reg.FindAllString(content, -1)
}
2019-01-02 17:46:30 +08:00
// ReadBinFile reads the whole file at path, or returns a "not found"
// error when it does not exist.
func (this *Common) ReadBinFile(path string) ([]byte, error) {
	if !this.IsExist(path) {
		return nil, errors.New("not found")
	}
	fi, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer fi.Close()
	return ioutil.ReadAll(fi)
}
2018-12-31 17:46:24 +08:00
func (this *Common) RemoveEmptyDir(pathname string) {
2019-01-02 17:46:30 +08:00
defer func() {
if re := recover(); re != nil {
buffer := debug.Stack()
log.Error("postFileToPeer")
log.Error(re)
log.Error(string(buffer))
}
}()
2018-12-31 17:46:24 +08:00
handlefunc := func(file_path string, f os.FileInfo, err error) error {
if f.IsDir() {
files, _ := ioutil.ReadDir(file_path)
2019-01-02 17:46:30 +08:00
if len(files) == 0 && file_path != pathname {
2018-12-31 17:46:24 +08:00
os.Remove(file_path)
}
}
return nil
}
fi, _ := os.Stat(pathname)
if fi.IsDir() {
filepath.Walk(pathname, handlefunc)
}
}
2019-01-17 21:46:29 +08:00
// JsonEncodePretty renders o as indented JSON. A plain string argument
// is treated as already-serialized JSON; every other value is
// marshalled first. If the intermediate text is not valid JSON it is
// returned as-is.
func (this *Common) JsonEncodePretty(o interface{}) string {
	// The original type-switch marshalled every non-string case
	// identically, so a single marshal covers them all.
	resp := ""
	if s, isStr := o.(string); isStr {
		resp = s
	} else if data, err := json.Marshal(o); err == nil {
		resp = string(data)
	}
	var v interface{}
	if err := json.Unmarshal([]byte(resp), &v); err == nil {
		if buf, err := json.MarshalIndent(v, "", " "); err == nil {
			resp = string(buf)
		}
	}
	return resp
}
2018-12-30 18:18:42 +08:00
// GetClientIp extracts the caller's IP, preferring the usual proxy
// headers (checked via direct map lookup with these exact keys, not the
// canonical form) and falling back to the connection's remote address.
func (this *Common) GetClientIp(r *http.Request) string {
	headers := []string{"X_Forwarded_For", "X-Forwarded-For", "X-Real-Ip",
		"X_Real_Ip", "Remote_Addr", "Remote-Addr"}
	for _, h := range headers {
		if vals, ok := r.Header[h]; ok && len(vals) > 0 {
			return vals[0]
		}
	}
	// RemoteAddr is "ip:port"; keep only the ip part.
	return strings.Split(r.RemoteAddr, ":")[0]
}
2019-01-16 15:30:53 +08:00
// RepairStat rebuilds the in-memory statistics (per-date and global
// file counts / byte totals) by re-reading every files.md5 log under
// DATA_DIR. Panics are recovered and logged so a malformed log cannot
// kill the caller.
func (this *Server) RepairStat() {
	var (
		size  int64 // stays zero: used only to reset the global totals below
		count int64 // stays zero: used only to reset the global totals below
	)
	defer func() {
		if re := recover(); re != nil {
			buffer := debug.Stack()
			log.Error("RepairStat")
			log.Error(re)
			log.Error(string(buffer))
			fmt.Println(re)
		}
	}()
	handlefunc := func(file_path string, f os.FileInfo, err error) error {
		var (
			files     []os.FileInfo
			date      []string
			data      []byte
			content   string
			lines     []string
			count     int64 // shadows the outer count: per-log-file line tally
			totalSize int64
			line      string
			cols      []string
			size      int64 // shadows the outer size: current line's byte size
		)
		if f.IsDir() {
			if files, err = ioutil.ReadDir(file_path); err != nil {
				return err
			}
			for _, file := range files {
				count = 0
				size = 0
				if file.Name() == CONST_FILE_Md5_FILE_NAME {
					if data, err = ioutil.ReadFile(file_path + "/" + file.Name()); err != nil {
						log.Error(err)
						continue
					}
					// The date (yyyymmdd) is encoded in the directory name.
					date = this.util.Match("\\d{8}", file_path)
					if len(date) < 1 {
						continue
					}
					content = string(data)
					lines = strings.Split(content, "\n")
					count = int64(len(lines))
					if count > 1 {
						// The log ends with a newline, so the last split
						// element is empty — exclude it from the count.
						count = count - 1
					}
					// Each line is "md5|size|path"; sum the size column.
					for _, line = range lines {
						cols = strings.Split(line, "|")
						if len(cols) > 2 {
							if size, err = strconv.ParseInt(cols[1], 10, 64); err != nil {
								size = 0
								continue
							}
							totalSize = totalSize + size
						}
					}
					// Per-date absolute values, plus global accumulation.
					statMap.Put(date[0]+"_"+CONST_STAT_FILE_COUNT_KEY, count)
					statMap.Put(date[0]+"_"+CONST_STAT_FILE_TOTAL_SIZE_KEY, totalSize)
					statMap.AddCountInt64(CONST_STAT_FILE_COUNT_KEY, count)
					statMap.AddCountInt64(CONST_STAT_FILE_TOTAL_SIZE_KEY, totalSize)
				}
			}
		}
		return nil
	}
	// Reset the global totals to zero, then let the walk re-accumulate.
	statMap.Put(CONST_STAT_FILE_COUNT_KEY, count)
	statMap.Put(CONST_STAT_FILE_TOTAL_SIZE_KEY, size)
	filepath.Walk(DATA_DIR, handlefunc)
}
2019-01-02 17:46:30 +08:00
func (this *Server) DownloadFromPeer(peer string, fileInfo *FileInfo) {
var (
err error
filename string
2019-01-08 18:24:51 +08:00
fpath string
fi os.FileInfo
2019-01-02 17:46:30 +08:00
)
if _, err = os.Stat(fileInfo.Path); err != nil {
os.MkdirAll(fileInfo.Path, 0777)
}
filename = fileInfo.Name
if fileInfo.ReName != "" {
filename = fileInfo.ReName
}
2019-01-07 17:19:06 +08:00
p := strings.Replace(fileInfo.Path, STORE_DIR+"/", "", 1)
req := httplib.Get(peer + "/" + Config().Group + "/" + p + "/" + filename)
2019-01-02 17:46:30 +08:00
2019-01-08 18:24:51 +08:00
fpath = fileInfo.Path + "/" + filename
2019-01-02 17:46:30 +08:00
req.SetTimeout(time.Second*5, time.Second*5)
2019-01-08 18:24:51 +08:00
if err = req.ToFile(fpath); err != nil {
2019-01-02 17:46:30 +08:00
log.Error(err)
}
2019-01-08 18:24:51 +08:00
if fi, err = os.Stat(fpath); err != nil {
os.Remove(fpath)
return
}
if fi.Size() == 0 {
os.Remove(fpath)
}
2019-01-02 17:46:30 +08:00
}
2018-05-10 18:19:04 +08:00
func (this *Server) Download(w http.ResponseWriter, r *http.Request) {
2019-01-01 23:31:14 +08:00
var (
2019-01-09 17:01:01 +08:00
err error
pathMd5 string
info os.FileInfo
peer string
fileInfo *FileInfo
fullpath string
pathval url.Values
token string
timestamp string
maxTimestamp int64
minTimestamp int64
ts int64
md5sum string
fp *os.File
2019-01-01 23:31:14 +08:00
)
2019-01-02 17:46:30 +08:00
2019-01-09 17:01:01 +08:00
r.ParseForm()
if Config().DownloadUseToken {
token = r.FormValue("token")
timestamp = r.FormValue("timestamp")
if token == "" || timestamp == "" {
w.Write([]byte("unvalid request"))
return
}
maxTimestamp = time.Now().Add(time.Second *
time.Duration(Config().DownloadTokenExpire)).Unix()
minTimestamp = time.Now().Add(-time.Second *
time.Duration(Config().DownloadTokenExpire)).Unix()
if ts, err = strconv.ParseInt(timestamp, 10, 64); err != nil {
w.Write([]byte("unvalid timestamp"))
return
}
if ts > maxTimestamp || ts < minTimestamp {
w.Write([]byte("timestamp expire"))
return
}
}
2019-01-02 17:46:30 +08:00
fullpath = r.RequestURI[len(Config().Group)+2 : len(r.RequestURI)]
2019-01-02 20:37:50 +08:00
2019-01-07 17:19:06 +08:00
fullpath = STORE_DIR + "/" + fullpath
2019-01-02 20:37:50 +08:00
if pathval, err = url.ParseQuery(fullpath); err != nil {
log.Error(err)
} else {
for k, _ := range pathval {
if k != "" {
fullpath = k
break
}
}
}
2019-01-09 17:01:01 +08:00
CheckToken := func(token string, md5sum string, timestamp string) bool {
if this.util.MD5(md5sum+timestamp) != token {
return false
}
return true
}
if Config().DownloadUseToken {
2019-01-09 22:11:40 +08:00
fullpath = strings.Split(fullpath, "?")[0]
2019-01-09 17:01:01 +08:00
pathMd5 = this.util.MD5(fullpath)
if fileInfo, err = this.GetFileInfoFromLevelDB(pathMd5); err != nil {
log.Error(err)
if this.util.FileExists(fullpath) {
if fp, err = os.Create(fullpath); err != nil {
log.Error(err)
}
if fp != nil {
defer fp.Close()
}
md5sum = this.util.GetFileMd5(fp)
if !CheckToken(token, md5sum, timestamp) {
w.Write([]byte("unvalid request,error token"))
return
}
}
} else {
if !CheckToken(token, fileInfo.Md5, timestamp) {
w.Write([]byte("unvalid request,error token"))
return
}
}
}
2019-01-01 23:31:14 +08:00
if info, err = os.Stat(fullpath); err != nil {
log.Error(err)
2019-01-02 17:46:30 +08:00
pathMd5 = this.util.MD5(fullpath)
for _, peer = range Config().Peers {
2019-01-02 18:09:02 +08:00
2019-01-02 17:46:30 +08:00
if fileInfo, err = this.checkPeerFileExist(peer, pathMd5); err != nil {
log.Error(err)
continue
}
2019-01-09 17:01:01 +08:00
2019-01-02 17:46:30 +08:00
if fileInfo.Md5 != "" {
2019-01-09 17:01:01 +08:00
if Config().DownloadUseToken {
if !CheckToken(token, fileInfo.Md5, timestamp) {
w.Write([]byte("unvalid request,error token"))
return
}
}
2019-01-02 17:46:30 +08:00
go this.DownloadFromPeer(peer, fileInfo)
http.Redirect(w, r, peer+r.RequestURI, 302)
2019-01-16 10:28:51 +08:00
return
2019-01-01 23:31:14 +08:00
}
2019-01-02 17:46:30 +08:00
2019-01-01 23:31:14 +08:00
}
2019-01-08 18:24:51 +08:00
w.WriteHeader(404)
2019-01-01 23:31:14 +08:00
return
}
if !Config().ShowDir && info.IsDir() {
2019-01-03 10:08:01 +08:00
w.Write([]byte("list dir deny"))
2019-01-01 23:31:14 +08:00
return
}
2018-05-10 18:19:04 +08:00
log.Info("download:" + r.RequestURI)
staticHandler.ServeHTTP(w, r)
}
2018-12-30 17:17:40 +08:00
// GetServerURI builds the base URL ("http://host/") for this request.
func (this *Server) GetServerURI(r *http.Request) string {
	return "http://" + r.Host + "/"
}
2018-12-30 23:31:42 +08:00
// CheckFileAndSendToPeer re-reads an md5 log file (default: today's
// errors.md5) and re-pushes every listed file to peers that miss it.
// When is_force_upload is set, each record's known-peer list is cleared
// first so every peer is retried. Panics are recovered and logged.
func (this *Server) CheckFileAndSendToPeer(filename string, is_force_upload bool) {
	defer func() {
		if re := recover(); re != nil {
			buffer := debug.Stack()
			log.Error("CheckFileAndSendToPeer")
			log.Error(re)
			log.Error(string(buffer))
		}
	}()
	if filename == "" {
		filename = DATA_DIR + "/" + time.Now().Format("20060102") + "/" + CONST_Md5_ERROR_FILE_NAME
	}
	if data, err := ioutil.ReadFile(filename); err == nil {
		content := string(data)
		lines := strings.Split(content, "\n")
		for _, line := range lines {
			// Each line is "md5|size|path"; only the md5 column matters here.
			cols := strings.Split(line, "|")
			if fileInfo, _ := this.GetFileInfoByMd5(cols[0]); fileInfo != nil && fileInfo.Md5 != "" {
				if is_force_upload {
					fileInfo.Peers = []string{}
				}
				this.postFileToPeer(fileInfo, false)
			}
		}
	}
}
2018-12-30 23:31:42 +08:00
// postFileToPeer pushes fileInfo's file to every configured peer that
// does not already hold it (probed via /check_file_exist), using the
// peer's /syncfile endpoint. On success the peer is appended to
// fileInfo.Peers and the record is persisted to levelDB; on failure the
// file is appended to errors.md5 (when write_log is set) for later
// retry via Sync/CheckFileAndSendToPeer.
func (this *Server) postFileToPeer(fileInfo *FileInfo, write_log bool) {
	var (
		err      error
		peer     string
		filename string
		info     *FileInfo
		postURL  string
		result   string
		data     []byte
		fi       os.FileInfo
		i        int
	)
	defer func() {
		if re := recover(); re != nil {
			buffer := debug.Stack()
			log.Error("postFileToPeer")
			log.Error(re)
			log.Error(string(buffer))
		}
	}()
	for i, peer = range Config().Peers {
		_ = i
		if fileInfo.Peers == nil {
			fileInfo.Peers = []string{}
		}
		if this.util.Contains(peer, fileInfo.Peers) {
			continue // this peer is already recorded as holding the file
		}
		filename = fileInfo.Name
		if Config().RenameFile {
			filename = fileInfo.ReName
		}
		if !this.util.FileExists(fileInfo.Path + "/" + filename) {
			continue // nothing on disk to send
		} else {
			if fileInfo.Size == 0 {
				// Backfill the size from disk when the record lacks it.
				if fi, err = os.Stat(fileInfo.Path + "/" + filename); err != nil {
					log.Error(err)
				} else {
					fileInfo.Size = fi.Size()
				}
			}
		}
		// NOTE(review): the error from checkPeerFileExist is intentionally
		// not checked — a non-empty Md5 is the only "already there" signal.
		if info, err = this.checkPeerFileExist(peer, fileInfo.Md5); info.Md5 != "" {
			continue
		}
		postURL = fmt.Sprintf("%s/%s", peer, "syncfile")
		b := httplib.Post(postURL)
		b.SetTimeout(time.Second*1, time.Second*1)
		b.Header("Sync-Path", fileInfo.Path)
		b.Param("name", filename)
		b.Param("md5", fileInfo.Md5)
		b.Param("timestamp", fmt.Sprintf("%d", fileInfo.TimeStamp))
		b.PostFile("file", fileInfo.Path+"/"+filename)
		// NOTE(review): the original comment says Debug(true) works around
		// an httplib bug — the upload reportedly misbehaves without it.
		// Confirm against the httplib version in use before removing.
		b.Debug(true)
		result, err = b.String()
		// A successful sync answers with the file's download URL.
		if !strings.HasPrefix(result, "http://") {
			if write_log {
				this.SaveFileMd5Log(fileInfo, CONST_Md5_ERROR_FILE_NAME)
			}
		} else {
			log.Info(result)
			if !this.util.Contains(peer, fileInfo.Peers) {
				fileInfo.Peers = append(fileInfo.Peers, peer)
				if data, err = json.Marshal(fileInfo); err != nil {
					log.Error(err)
					return
				}
				this.ldb.Put([]byte(fileInfo.Md5), data, nil)
			}
		}
		if err != nil {
			log.Error(err)
		}
	}
}
2019-01-03 10:08:01 +08:00
// SaveFileMd5Log appends a "md5|size|path" line for fileInfo to the
// given log (files.md5 or errors.md5) inside the yyyymmdd directory
// derived from the file's upload timestamp.
func (this *Server) SaveFileMd5Log(fileInfo *FileInfo, filename string) {
	var (
		err     error
		msg     string
		tmpFile *os.File
		logpath string
		outname string
	)
	outname = fileInfo.Name
	if fileInfo.ReName != "" {
		outname = fileInfo.ReName
	}
	logpath = DATA_DIR + "/" + time.Unix(fileInfo.TimeStamp, 0).Format("20060102")
	if _, err = os.Stat(logpath); err != nil {
		os.MkdirAll(logpath, 0777)
	}
	msg = fmt.Sprintf("%s|%d|%s\n", fileInfo.Md5, fileInfo.Size, fileInfo.Path+"/"+outname)
	// Append-only open: concurrent writers each add whole lines.
	if tmpFile, err = os.OpenFile(logpath+"/"+filename, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0644); err != nil {
		log.Error(err)
		return
	}
	defer tmpFile.Close()
	tmpFile.WriteString(msg)
}
2018-12-30 17:17:40 +08:00
func (this *Server) GetFileInfoByMd5(md5sum string) (*FileInfo, error) {
var (
data []byte
err error
fileInfo FileInfo
)
2019-01-16 10:28:51 +08:00
if data, err = this.ldb.Get([]byte(md5sum), nil); err != nil {
2018-12-30 17:17:40 +08:00
return nil, err
} else {
if err = json.Unmarshal(data, &fileInfo); err == nil {
return &fileInfo, nil
} else {
return nil, err
}
}
}
2018-12-30 23:31:42 +08:00
func (this *Server) checkPeerFileExist(peer string, md5sum string) (*FileInfo, error) {
var (
2019-01-16 12:08:47 +08:00
err error
fileInfo FileInfo
2018-12-30 23:31:42 +08:00
)
req := httplib.Get(peer + fmt.Sprintf("/check_file_exist?md5=%s", md5sum))
req.SetTimeout(time.Second*5, time.Second*5)
2019-01-16 12:08:47 +08:00
if err = req.ToJSON(&fileInfo); err != nil {
return &FileInfo{}, err
}
2019-01-01 23:31:14 +08:00
2019-01-16 12:08:47 +08:00
if fileInfo.Md5 == "" {
return &fileInfo, errors.New("not found")
2018-12-30 23:31:42 +08:00
}
2019-01-16 12:08:47 +08:00
return &fileInfo, nil
2018-12-30 23:31:42 +08:00
}
2018-12-30 17:17:40 +08:00
func (this *Server) CheckFileExist(w http.ResponseWriter, r *http.Request) {
var (
2018-12-30 23:31:42 +08:00
data []byte
2018-12-30 17:17:40 +08:00
err error
fileInfo *FileInfo
)
r.ParseForm()
md5sum := ""
if len(r.Form["md5"]) > 0 {
md5sum = r.Form["md5"][0]
} else {
return
}
if fileInfo, err = this.GetFileInfoByMd5(md5sum); fileInfo != nil {
2018-12-30 23:31:42 +08:00
if data, err = json.Marshal(fileInfo); err == nil {
w.Write(data)
return
}
2018-12-30 17:17:40 +08:00
}
2018-12-30 23:31:42 +08:00
data, _ = json.Marshal(FileInfo{})
w.Write(data)
2018-12-30 17:17:40 +08:00
}
func (this *Server) Sync(w http.ResponseWriter, r *http.Request) {
r.ParseForm()
date := ""
2018-12-30 23:31:42 +08:00
force := ""
is_force_upload := false
2019-01-17 21:46:29 +08:00
force=r.FormValue("force")
date=r.FormValue("date")
2018-12-30 23:31:42 +08:00
2019-01-17 21:46:29 +08:00
if force == "1" {
2018-12-30 23:31:42 +08:00
is_force_upload = true
}
2019-01-17 21:46:29 +08:00
if date=="" {
2019-01-16 10:28:51 +08:00
w.Write([]byte("require paramete date &force , date?=20181230"))
2018-12-30 17:17:40 +08:00
return
}
date = strings.Replace(date, ".", "", -1)
2019-01-03 10:08:01 +08:00
filename := DATA_DIR + "/" + date + "/" + CONST_Md5_ERROR_FILE_NAME
2018-12-30 17:17:40 +08:00
if this.util.FileExists(filename) {
2018-12-30 23:31:42 +08:00
go this.CheckFileAndSendToPeer(filename, is_force_upload)
2018-12-30 17:17:40 +08:00
}
2019-01-03 10:08:01 +08:00
filename = DATA_DIR + "/" + date + "/" + CONST_FILE_Md5_FILE_NAME
2018-12-30 17:17:40 +08:00
if this.util.FileExists(filename) {
2018-12-30 23:31:42 +08:00
go this.CheckFileAndSendToPeer(filename, is_force_upload)
2018-12-30 17:17:40 +08:00
}
w.Write([]byte("job is running"))
}
2019-01-01 23:31:14 +08:00
func (this *Server) GetFileInfoFromLevelDB(key string) (*FileInfo, error) {
var (
err error
data []byte
fileInfo FileInfo
)
2019-01-16 10:28:51 +08:00
if data, err = this.ldb.Get([]byte(key), nil); err != nil {
2019-01-01 23:31:14 +08:00
return nil, err
}
if err = json.Unmarshal(data, &fileInfo); err != nil {
return nil, err
}
return &fileInfo, nil
}
2019-01-02 17:46:30 +08:00
// SaveStat persists the in-memory statistics to stat.json once per
// minute, forever; it is meant to run in its own goroutine. Nothing is
// written until a positive total size exists, so an empty map never
// clobbers a previous snapshot. Panics inside a tick are recovered.
func (this *Server) SaveStat() {
	SaveStatFunc := func() {
		defer func() {
			if re := recover(); re != nil {
				buffer := debug.Stack()
				log.Error("SaveStatFunc")
				log.Error(re)
				log.Error(string(buffer))
			}
		}()
		stat := statMap.Get()
		if v, ok := stat[CONST_STAT_FILE_TOTAL_SIZE_KEY]; ok {
			switch v.(type) {
			case int64:
				if v.(int64) > 0 {
					if data, err := json.Marshal(stat); err != nil {
						log.Error(err)
					} else {
						this.util.WriteBinFile(CONST_STAT_FILE_NAME, data)
					}
				}
			}
		}
	}
	for {
		time.Sleep(time.Minute * 1)
		SaveStatFunc()
	}
}
2019-01-01 23:31:14 +08:00
func (this *Server) SaveFileInfoToLevelDB(key string, fileInfo *FileInfo) (*FileInfo, error) {
var (
err error
data []byte
)
if data, err = json.Marshal(fileInfo); err != nil {
return fileInfo, err
}
2019-01-16 10:28:51 +08:00
if err = this.ldb.Put([]byte(key), data, nil); err != nil {
2019-01-01 23:31:14 +08:00
return fileInfo, err
}
return fileInfo, nil
}
2019-01-03 10:08:01 +08:00
// IsPeer reports whether the request originates from one of the
// configured peers, matched by "http://<client-ip>" prefix.
func (this *Server) IsPeer(r *http.Request) bool {
	ip := "http://" + this.util.GetClientIp(r)
	for _, peer := range Config().Peers {
		if strings.HasPrefix(peer, ip) {
			return true
		}
	}
	return false
}
2019-01-02 17:46:30 +08:00
// SyncFile is the HTTP handler behind /syncfile: peers POST files here
// to replicate them. The request carries the target directory
// (Sync-Path header), the md5, the original name, the upload timestamp
// and the file body. Non-peer callers are silently rejected. On
// success the caller receives the file's download URL as plain text.
func (this *Server) SyncFile(w http.ResponseWriter, r *http.Request) {
	var (
		err        error
		outPath    string
		outname    string
		fileInfo   FileInfo
		tmpFile    *os.File
		fi         os.FileInfo
		uploadFile multipart.File
	)
	if !this.IsPeer(r) {
		log.Error(fmt.Sprintf(" not is peer,ip:%s", this.util.GetClientIp(r)))
		return
	}
	if r.Method == "POST" {
		fileInfo.Path = r.Header.Get("Sync-Path")
		fileInfo.Md5 = r.PostFormValue("md5")
		fileInfo.Name = r.PostFormValue("name")
		fileInfo.TimeStamp, err = strconv.ParseInt(r.PostFormValue("timestamp"), 10, 64)
		if err != nil {
			// Fall back to "now" when the sender omitted or garbled the timestamp.
			fileInfo.TimeStamp = time.Now().Unix()
			log.Error(err)
		}
		if uploadFile, _, err = r.FormFile("file"); err != nil {
			w.Write([]byte(err.Error()))
			log.Error(err)
			return
		}
		fileInfo.Peers = []string{}
		defer uploadFile.Close()
		// Already known by md5? Answer with the existing download URL.
		if v, _ := this.GetFileInfoFromLevelDB(fileInfo.Md5); v != nil && v.Md5 != "" {
			outname = v.Name
			if v.ReName != "" {
				outname = v.ReName
			}
			p := strings.Replace(v.Path, STORE_DIR+"/", "", 1)
			download_url := fmt.Sprintf("http://%s/%s", r.Host, Config().Group+"/"+p+"/"+outname)
			w.Write([]byte(download_url))
			return
		}
		os.MkdirAll(fileInfo.Path, 0777)
		outPath = fileInfo.Path + "/" + fileInfo.Name
		// If a file already sits at outPath, its md5 must match the claim.
		if this.util.FileExists(outPath) {
			if tmpFile, err = os.Open(outPath); err != nil {
				log.Error(err)
				w.Write([]byte(err.Error()))
				return
			}
			// NOTE(review): on the matching-md5 path this handle is never
			// closed before tmpFile is re-assigned by os.Create below —
			// the descriptor leaks. Verify and fix separately.
			if this.util.GetFileMd5(tmpFile) != fileInfo.Md5 {
				tmpFile.Close()
				log.Error("md5 !=fileInfo.Md5 ")
				w.Write([]byte("md5 !=fileInfo.Md5 "))
				return
			}
		}
		if tmpFile, err = os.Create(outPath); err != nil {
			log.Error(err)
			w.Write([]byte(err.Error()))
			return
		}
		defer tmpFile.Close()
		if _, err = io.Copy(tmpFile, uploadFile); err != nil {
			w.Write([]byte(err.Error()))
			log.Error(err)
			return
		}
		// Verify what actually reached the disk; discard on mismatch.
		if this.util.GetFileMd5(tmpFile) != fileInfo.Md5 {
			w.Write([]byte("md5 error"))
			tmpFile.Close()
			os.Remove(outPath)
			return
		}
		if fi, err = os.Stat(outPath); err != nil {
			log.Error(err)
		} else {
			fileInfo.Size = fi.Size()
			statMap.AddCountInt64(CONST_STAT_FILE_TOTAL_SIZE_KEY, fi.Size())
			statMap.AddCountInt64(CONST_STAT_FILE_COUNT_KEY, 1)
		}
		if fileInfo.Peers == nil {
			fileInfo.Peers = []string{fmt.Sprintf("http://%s", r.Host)}
		}
		// Index the record under both the path-md5 and the content-md5.
		if _, err = this.SaveFileInfoToLevelDB(this.util.MD5(outPath), &fileInfo); err != nil {
			log.Error(err)
		}
		if _, err = this.SaveFileInfoToLevelDB(fileInfo.Md5, &fileInfo); err != nil {
			log.Error(err)
		}
		this.SaveFileMd5Log(&fileInfo, CONST_FILE_Md5_FILE_NAME)
		p := strings.Replace(fileInfo.Path, STORE_DIR+"/", "", 1)
		download_url := fmt.Sprintf("http://%s/%s", r.Host, Config().Group+"/"+p+"/"+fileInfo.Name)
		w.Write([]byte(download_url))
	}
}
2019-01-08 10:23:13 +08:00
// CheckScene validates scene against the configured scene list; an
// empty list accepts every scene.
func (this *Server) CheckScene(scene string) (bool, error) {
	scenes := Config().Scenes
	if len(scenes) == 0 {
		return true, nil
	}
	if this.util.Contains(scene, scenes) {
		return true, nil
	}
	return false, errors.New("not valid scene")
}
2019-01-08 16:53:03 +08:00
// RemoveFile is the HTTP handler behind file removal: it deletes the
// local file whose content md5 is given via ?md5=... (32 hex chars).
// Metadata in levelDB is left in place (matching previous behavior).
func (this *Server) RemoveFile(w http.ResponseWriter, r *http.Request) {
	r.ParseForm()
	md5sum := r.FormValue("md5")
	if len(md5sum) != 32 {
		w.Write([]byte("md5 unvalid"))
		return
	}
	fileInfo, err := this.GetFileInfoByMd5(md5sum)
	if err != nil {
		w.Write([]byte(err.Error()))
		return
	}
	name := fileInfo.Name
	if fileInfo.ReName != "" {
		name = fileInfo.ReName
	}
	fpath := fileInfo.Path + "/" + name
	if fileInfo.Path != "" && this.util.FileExists(fpath) {
		if err = os.Remove(fpath); err != nil {
			w.Write([]byte(err.Error()))
		} else {
			w.Write([]byte("remove success"))
		}
		return
	}
	w.Write([]byte("fail remove"))
}
2018-05-10 18:19:04 +08:00
// Upload handles a multipart/form-data POST: it stores the payload under
// STORE_DIR, deduplicates by content md5 via leveldb, records statistics,
// queues the file for peer replication, and answers with a download URL
// as plain text or as a FileResult JSON document (per the "output" field).
// Non-POST requests get an error string.
//
// Recognized form fields: file (required), scene (or legacy "scenes"),
// md5 (optional client-side checksum), output ("text"|"json"), and path
// (honored only when Config().EnableCustomPath is set).
func (this *Server) Upload(w http.ResponseWriter, r *http.Request) {
	var (
		err error
		// pathname string
		outname      string
		md5sum       string
		fileInfo     FileInfo
		uploadFile   multipart.File
		uploadHeader *multipart.FileHeader
		scene        string
		output       string
		fileResult   FileResult
		data         []byte
		domain       string
	)
	if r.Method == "POST" {
		// name := r.PostFormValue("name")

		// fileInfo.Path = r.Header.Get("Sync-Path")
		// Client-chosen storage path is opt-in via configuration.
		if Config().EnableCustomPath {
			fileInfo.Path = r.FormValue("path")
			fileInfo.Path = strings.Trim(fileInfo.Path, "/")
		}
		scene = r.FormValue("scene")
		if scene == "" {
			//Just for Compatibility
			scene = r.FormValue("scenes")
		}
		md5sum = r.FormValue("md5")
		output = r.FormValue("output")
		fileInfo.Md5 = md5sum
		if uploadFile, uploadHeader, err = r.FormFile("file"); err != nil {
			w.Write([]byte(err.Error()))
			return
		}
		fileInfo.Peers = []string{}
		fileInfo.TimeStamp = time.Now().Unix()
		if scene == "" {
			scene = Config().DefaultScene
		}
		if output == "" {
			output = "text"
		}
		if !this.util.Contains(output, []string{"json", "text"}) {
			w.Write([]byte("output just support json or text"))
			return
		}
		fileInfo.Scene = scene
		if _, err = this.CheckScene(scene); err != nil {
			w.Write([]byte(err.Error()))
			return
		}
		// Download links are built against DownloadDomain when configured,
		// falling back to the Host header of this request.
		if Config().DownloadDomain != "" {
			domain = fmt.Sprintf("http://%s", Config().DownloadDomain)
		} else {
			domain = fmt.Sprintf("http://%s", r.Host)
		}
		// NOTE(review): err was already checked right after FormFile above,
		// so this branch looks unreachable; kept for fidelity.
		if err != nil {
			log.Error(err)
			fmt.Printf("FromFileErr")
			http.Redirect(w, r, "/", http.StatusMovedPermanently)
			return
		}
		// SaveUploadFile writes the multipart payload to disk under a
		// date-based (or scene/custom) folder, resolves name collisions
		// with a numeric prefix, and fills Md5/Path/Name/Peers on fileInfo.
		SaveUploadFile := func(file multipart.File, header *multipart.FileHeader, fileInfo *FileInfo) (*FileInfo, error) {
			var (
				err     error
				outFile *os.File
				folder  string
			)
			defer file.Close()
			fileInfo.Name = header.Filename
			if Config().RenameFile {
				// Random on-disk name prevents clashes between originals.
				fileInfo.ReName = this.util.MD5(this.util.GetUUID()) + path.Ext(fileInfo.Name)
			}
			// Default layout: STORE_DIR[/scene]/yyyymmdd/HH/MM
			folder = time.Now().Format("20060102/15/04")
			if fileInfo.Scene != "" {
				folder = fmt.Sprintf(STORE_DIR+"/%s/%s", fileInfo.Scene, folder)
			} else {
				folder = fmt.Sprintf(STORE_DIR+"/%s", folder)
			}
			// A custom path (when provided) overrides the date layout.
			if fileInfo.Path != "" {
				if strings.HasPrefix(fileInfo.Path, STORE_DIR) {
					folder = fileInfo.Path
				} else {
					folder = STORE_DIR + "/" + fileInfo.Path
				}
			}
			if !util.FileExists(folder) {
				os.MkdirAll(folder, 0777)
			}
			outPath := fmt.Sprintf(folder+"/%s", fileInfo.Name)
			if Config().RenameFile {
				outPath = fmt.Sprintf(folder+"/%s", fileInfo.ReName)
			}
			// Collision handling: probe up to 10000 "<i>_<name>" variants.
			if this.util.FileExists(outPath) {
				for i := 0; i < 10000; i++ {
					outPath = fmt.Sprintf(folder+"/%d_%s", i, header.Filename)
					fileInfo.Name = fmt.Sprintf("%d_%s", i, header.Filename)
					if !this.util.FileExists(outPath) {
						break
					}
				}
			}
			log.Info(fmt.Sprintf("upload: %s", outPath))
			if outFile, err = os.Create(outPath); err != nil {
				return fileInfo, err
			}
			defer outFile.Close()
			if err != nil {
				log.Error(err)
				return fileInfo, errors.New("(error)fail," + err.Error())
			}
			if _, err = io.Copy(outFile, file); err != nil {
				log.Error(err)
				return fileInfo, errors.New("(error)fail," + err.Error())
			}
			// Md5 is computed from the file just written; assumes
			// GetFileMd5 handles rewinding outFile — TODO confirm.
			v := util.GetFileMd5(outFile)
			fileInfo.Md5 = v
			fileInfo.Path = folder
			fileInfo.Peers = append(fileInfo.Peers, fmt.Sprintf("http://%s", r.Host))
			return fileInfo, nil
		}
		// NOTE(review): the error result is discarded; a failed write
		// still falls through to the dedupe/metadata path below.
		SaveUploadFile(uploadFile, uploadHeader, &fileInfo)
		// Dedupe: if this md5 is already known, delete the fresh copy and
		// answer with the previously stored file's URL.
		if v, _ := this.GetFileInfoFromLevelDB(fileInfo.Md5); v != nil && v.Md5 != "" {
			if Config().RenameFile {
				os.Remove(fileInfo.Path + "/" + fileInfo.ReName)
			} else {
				os.Remove(fileInfo.Path + "/" + fileInfo.Name)
			}
			outname = v.Name
			if v.ReName != "" {
				outname = v.ReName
			}
			p := strings.Replace(v.Path, STORE_DIR+"/", "", 1)
			p = Config().Group + "/" + p + "/" + outname
			download_url := fmt.Sprintf("http://%s/%s", r.Host, p)
			if Config().DownloadDomain != "" {
				download_url = fmt.Sprintf("http://%s/%s", Config().DownloadDomain, p)
			}
			if output == "json" {
				fileResult.Url = download_url
				fileResult.Md5 = v.Md5
				fileResult.Path = "/" + p
				fileResult.Domain = domain
				fileResult.Scene = fileInfo.Scene
				// Just for Compatibility
				fileResult.Src = fileResult.Path
				fileResult.Scenes = fileInfo.Scene
				if data, err = json.Marshal(fileResult); err != nil {
					w.Write([]byte(err.Error()))
					return
				}
				w.Write(data)
			} else {
				w.Write([]byte(download_url))
			}
			return
		}
		if fileInfo.Md5 == "" {
			log.Warn(" fileInfo.Md5 is null")
			return
		}
		// If the client supplied an md5, verify it matches the stored file.
		if md5sum != "" && fileInfo.Md5 != md5sum {
			log.Warn(" fileInfo.Md5 and md5sum !=")
			return
		}
		// UploadToPeer persists the metadata twice (keyed by content md5
		// and by md5 of the full path) and enqueues the file for
		// asynchronous peer replication via queueToPeers.
		UploadToPeer := func(fileInfo *FileInfo) {
			var (
				err      error
				pathMd5  string
				fullpath string
				// data []byte
			)
			// Serializability check only; the encoded bytes are discarded.
			if _, err = json.Marshal(fileInfo); err != nil {
				log.Error(err)
				log.Error(fmt.Sprintf("UploadToPeer fail: %v", fileInfo))
				return
			}
			if _, err = this.SaveFileInfoToLevelDB(fileInfo.Md5, fileInfo); err != nil {
				log.Error("SaveFileInfoToLevelDB fail", err)
			}
			fullpath = fileInfo.Path + "/" + fileInfo.Name
			if Config().RenameFile {
				fullpath = fileInfo.Path + "/" + fileInfo.ReName
			}
			pathMd5 = this.util.MD5(fullpath)
			if _, err = this.SaveFileInfoToLevelDB(pathMd5, fileInfo); err != nil {
				log.Error("SaveFileInfoToLevelDB fail", err)
			}
			// Drop (rather than block) when the replication queue is full.
			if len(queueToPeers) < CONST_QUEUE_SIZE {
				queueToPeers <- FileInfo{Name: fileInfo.Name,
					Peers: []string{}, TimeStamp: fileInfo.TimeStamp,
					Path: fileInfo.Path, Md5: fileInfo.Md5, ReName: fileInfo.ReName,
					Size: fileInfo.Size, Scene: fileInfo.Scene}
			}
			// go this.postFileToPeer(fileInfo, true)
		}
		UploadToPeer(&fileInfo)
		outname = fileInfo.Name
		if Config().RenameFile {
			outname = fileInfo.ReName
		}
		// Record size/count statistics under the global and per-day keys.
		if fi, err := os.Stat(fileInfo.Path + "/" + outname); err != nil {
			log.Error(err)
		} else {
			fileInfo.Size = fi.Size()
			statMap.AddCountInt64(CONST_STAT_FILE_TOTAL_SIZE_KEY, fi.Size())
			statMap.AddCountInt64(CONST_STAT_FILE_COUNT_KEY, 1)
			statMap.AddCountInt64(this.util.GetToDay()+"_"+CONST_STAT_FILE_TOTAL_SIZE_KEY, fi.Size())
			statMap.AddCountInt64(this.util.GetToDay()+"_"+CONST_STAT_FILE_COUNT_KEY, 1)
		}
		this.SaveFileMd5Log(&fileInfo, CONST_FILE_Md5_FILE_NAME)
		p := strings.Replace(fileInfo.Path, STORE_DIR+"/", "", 1)
		p = Config().Group + "/" + p + "/" + outname
		download_url := fmt.Sprintf("http://%s/%s", r.Host, p)
		if Config().DownloadDomain != "" {
			download_url = fmt.Sprintf("http://%s/%s", Config().DownloadDomain, p)
		}
		if output == "json" {
			fileResult.Url = download_url
			fileResult.Md5 = fileInfo.Md5
			fileResult.Path = "/" + p
			fileResult.Domain = domain
			fileResult.Scene = fileInfo.Scene
			// Just for Compatibility
			fileResult.Src = fileResult.Path
			fileResult.Scenes = fileInfo.Scene
			if data, err = json.Marshal(fileResult); err != nil {
				w.Write([]byte(err.Error()))
				return
			}
			w.Write(data)
		} else {
			w.Write([]byte(download_url))
		}
		return
	} else {
		w.Write([]byte("(error)fail,please use post method"))
		return
	}
}
2018-05-10 13:31:34 +08:00
2019-01-09 12:05:20 +08:00
// SendToMail sends an alarm mail via the SMTP server configured in
// Config().Mail. Multiple recipients may be given in "to" separated by
// ';'. mailtype selects the Content-Type: "html" for text/html, anything
// else for text/plain. Returns the error from smtp.SendMail, if any.
func (this *Server) SendToMail(to, subject, body, mailtype string) error {
	host := Config().Mail.Host
	user := Config().Mail.User
	password := Config().Mail.Password
	hp := strings.Split(host, ":")
	auth := smtp.PlainAuth("", user, password, hp[0])
	var content_type string
	if mailtype == "html" {
		content_type = "Content-Type: text/" + mailtype + "; charset=UTF-8"
	} else {
		content_type = "Content-Type: text/plain" + "; charset=UTF-8"
	}
	// BUG FIX: the original message dropped the subject entirely
	// ("Subject: " + "\r\n") and emitted a stray '>' after the From
	// address; both produced malformed headers in mail clients.
	msg := []byte("To: " + to + "\r\nFrom: " + user + "\r\nSubject: " + subject + "\r\n" + content_type + "\r\n\r\n" + body)
	send_to := strings.Split(to, ";")
	err := smtp.SendMail(host, auth, user, send_to, msg)
	return err
}
2019-01-04 10:22:03 +08:00
// BenchMark is a developer-only endpoint that bulk-writes 100 million
// synthetic FileInfo records straight into leveldb in batches of 10000,
// printing progress and writing the total elapsed time to "time.txt".
// It never writes an HTTP response body.
// NOTE(review): this runs for a very long time and permanently bloats the
// database — it should not be reachable on production deployments.
func (this *Server) BenchMark(w http.ResponseWriter, r *http.Request) {
	t := time.Now()
	batch := new(leveldb.Batch)
	for i := 0; i < 100000000; i++ {
		f := FileInfo{}
		f.Peers = []string{"http://192.168.0.1", "http://192.168.2.5"}
		f.Path = "20190201/19/02"
		// Key and name are the md5 of the loop counter, so keys are unique
		// and evenly distributed.
		s := strconv.Itoa(i)
		s = util.MD5(s)
		f.Name = s
		f.Md5 = s
		// server.SaveFileInfoToLevelDB(s, &f)
		if data, err := json.Marshal(&f); err == nil {
			batch.Put([]byte(s), data)
		}
		// Flush every 10k records so the batch stays bounded in memory.
		if i%10000 == 0 {
			if batch.Len() > 0 {
				server.ldb.Write(batch, nil)
				// batch = new(leveldb.Batch)
				batch.Reset()
			}
			fmt.Println(i, time.Since(t).Seconds())
		}
		//fmt.Println(server.GetFileInfoByMd5(s))
	}
	util.WriteFile("time.txt", time.Since(t).String())
	fmt.Println(time.Since(t).String())
}
2019-01-16 15:30:53 +08:00
// RepairStatWeb triggers a synchronous statistics rebuild over HTTP and
// answers "ok" when it completes.
func (this *Server) RepairStatWeb(w http.ResponseWriter, r *http.Request) {
	this.RepairStat()
	fmt.Fprint(w, "ok")
}
2019-01-17 21:46:29 +08:00
2019-01-02 17:46:30 +08:00
func (this *Server) Stat(w http.ResponseWriter, r *http.Request) {
2019-01-17 21:46:29 +08:00
data:=this.util.JsonEncodePretty(this.GetStat())
w.Write([]byte(data))
}
func (this *Server) GetStat() []StatDateFileInfo {
2019-01-09 12:05:20 +08:00
var (
min int64
max int64
err error
i int64
2019-01-17 21:46:29 +08:00
2019-01-09 12:05:20 +08:00
rows []StatDateFileInfo
)
min = 20190101
max = 20190101
for k, _ := range statMap.Get() {
ks := strings.Split(k, "_")
if len(ks) == 2 {
if i, err = strconv.ParseInt(ks[0], 10, 64); err != nil {
continue
}
if i >= max {
max = i
}
if i < min {
min = i
}
}
}
for i := min; i <= max; i++ {
s := fmt.Sprintf("%d", i)
if v, ok := statMap.GetValue(s + "_" + CONST_STAT_FILE_TOTAL_SIZE_KEY); ok {
var info StatDateFileInfo
info.Date = s
switch v.(type) {
case int64:
info.TotalSize = v.(int64)
}
if v, ok := statMap.GetValue(s + "_" + CONST_STAT_FILE_COUNT_KEY); ok {
switch v.(type) {
case int64:
info.FileCount = v.(int64)
}
}
rows = append(rows, info)
2018-05-10 13:31:34 +08:00
2019-01-02 17:46:30 +08:00
}
2019-01-09 12:05:20 +08:00
2017-09-09 16:40:55 +08:00
}
2019-01-09 13:13:21 +08:00
if v, ok := statMap.GetValue(CONST_STAT_FILE_COUNT_KEY); ok {
var info StatDateFileInfo
info.Date = "all"
info.FileCount = v.(int64)
if v, ok := statMap.GetValue(CONST_STAT_FILE_TOTAL_SIZE_KEY); ok {
info.TotalSize = v.(int64)
}
rows = append(rows, info)
}
2019-01-17 21:46:29 +08:00
return rows
2019-01-09 12:05:20 +08:00
}
func (this *Server) RegisterExit() {
c := make(chan os.Signal)
signal.Notify(c, syscall.SIGHUP, syscall.SIGINT, syscall.SIGTERM, syscall.SIGQUIT)
go func() {
for s := range c {
switch s {
case syscall.SIGHUP, syscall.SIGINT, syscall.SIGTERM, syscall.SIGQUIT:
2019-01-16 10:28:51 +08:00
this.ldb.Close()
2019-01-09 12:05:20 +08:00
log.Info("Exit", s)
os.Exit(1)
}
}
}()
}
2019-01-17 01:05:27 +08:00
// Consumer starts a fixed pool of 50 background workers that drain the
// replication queue and push each queued FileInfo to the peer servers.
func (this *Server) Consumer() {
	worker := func() {
		// queueToPeers is never closed, so this ranges forever.
		for fileInfo := range queueToPeers {
			this.postFileToPeer(&fileInfo, true)
		}
	}
	for i := 0; i < 50; i++ {
		go worker()
	}
}
2019-01-17 21:46:29 +08:00
func (this *Server) AutoRepair() {
AutoRepairFunc := func() {
var (
dateStats []StatDateFileInfo
err error
countKey string
)
defer func() {
if re := recover(); re != nil {
buffer := debug.Stack()
log.Error("AutoRepair")
log.Error(re)
log.Error(string(buffer))
}
}()
Update:= func(peer string, dateStat StatDateFileInfo) {
req:=httplib.Get(fmt.Sprintf("%s/sync?date=%s&force=%s",peer, dateStat.Date,"1"))
req.SetTimeout(time.Second*5,time.Second*5)
if _,err=req.String();err!=nil {
log.Error(err)
}
log.Info(fmt.Sprintf("syn file from %s date %s",peer,dateStat.Date))
}
for _,peer:=range Config().Peers {
req:=httplib.Get(fmt.Sprintf("%s/%s",peer,"stat"))
req.SetTimeout(time.Second*5,time.Second*5)
if err=req.ToJSON(&dateStats);err!=nil {
log.Error(err)
continue
}
for _,dateStat:=range dateStats {
if dateStat.Date=="all" {
continue
}
countKey=dateStat.Date+"_"+ CONST_STAT_FILE_COUNT_KEY
if v,ok:= statMap.GetValue(countKey);ok {
switch v.(type) {
case int64:
if v.(int64)<dateStat.FileCount {
Update(peer,dateStat)
}
}
} else {
Update(peer,dateStat)
}
}
}
}
for {
time.Sleep(time.Second*10)
AutoRepairFunc()
time.Sleep(time.Minute*60)
2019-01-17 21:46:29 +08:00
}
}
2019-01-09 12:05:20 +08:00
func (this *Server) Check() {
2019-01-09 13:13:21 +08:00
check := func() {
defer func() {
if re := recover(); re != nil {
buffer := debug.Stack()
log.Error("postFileToPeer")
log.Error(re)
log.Error(string(buffer))
}
}()
var (
status Status
err error
subject string
body string
2019-01-09 14:48:15 +08:00
req *httplib.BeegoHTTPRequest
2019-01-09 13:13:21 +08:00
)
for _, peer := range Config().Peers {
2019-01-09 14:48:15 +08:00
req = httplib.Get(peer + "/status")
2019-01-09 13:13:21 +08:00
req.SetTimeout(time.Second*5, time.Second*5)
err = req.ToJSON(&status)
if status.Status != "ok" {
for _, to := range Config().AlramReceivers {
2019-01-09 14:48:15 +08:00
subject = "fastdfs server error"
2019-01-09 13:13:21 +08:00
if err != nil {
2019-01-09 14:48:15 +08:00
body = fmt.Sprintf("%s\nserver:%s\nerror:\n%s", subject, peer, err.Error())
} else {
body = fmt.Sprintf("%s\nserver:%s\n", subject, peer)
}
if err = this.SendToMail(to, subject, body, "text"); err != nil {
log.Error(err)
}
}
if Config().AlarmUrl != "" {
req = httplib.Post(Config().AlarmUrl)
req.SetTimeout(time.Second*10, time.Second*10)
req.Param("message", body)
req.Param("subject", subject)
if _, err = req.String(); err != nil {
log.Error(err)
2019-01-09 13:13:21 +08:00
}
2019-01-09 14:48:15 +08:00
2019-01-09 13:13:21 +08:00
}
}
}
}
go func() {
for {
2019-01-09 14:57:43 +08:00
time.Sleep(time.Minute * 10)
2019-01-09 13:13:21 +08:00
check()
}
}()
}
func (this *Server) Status(w http.ResponseWriter, r *http.Request) {
var (
status Status
err error
data []byte
)
status.Status = "ok"
if data, err = json.Marshal(&status); err != nil {
status.Status = "fail"
status.Message = err.Error()
w.Write(data)
return
}
w.Write(data)
2019-01-09 12:05:20 +08:00
}
// HeartBeat is a no-op handler: it returns an empty 200 response.
// NOTE(review): presumably a minimal reachability probe for peers, but no
// route to it is registered in Main within this file — confirm callers.
func (this *Server) HeartBeat(w http.ResponseWriter, r *http.Request) {

}
2018-05-10 18:19:04 +08:00
func (this *Server) Index(w http.ResponseWriter, r *http.Request) {
2019-01-03 10:08:01 +08:00
if Config().EnableWebUpload {
fmt.Fprintf(w,
2019-01-08 10:23:13 +08:00
fmt.Sprintf(`<html>
2017-09-09 16:40:55 +08:00
<head>
<meta charset="utf-8"></meta>
<title>Uploader</title>
2019-01-08 16:53:03 +08:00
<style>
form {
bargin
}
.form-line {
display:block;
}
</style>
2017-09-09 16:40:55 +08:00
</head>
<body>
<form action="/upload" method="post" enctype="multipart/form-data">
2019-01-08 16:53:03 +08:00
<span class="form-line">文件(file):<input type="file" id="file" name="file" ></span>
<span class="form-line">场景(scene):<input type="text" id="scene" name="scene" value="%s"></span>
<span class="form-line">输出(output):<input type="text" id="output" name="output" value="json"></span>
<span class="form-line">自定义路径(path):<input type="text" id="path" name="path" value=""></span>
2017-09-09 16:40:55 +08:00
<input type="submit" name="submit" value="upload">
</form>
</body>
2019-01-08 10:23:13 +08:00
</html>`, Config().DefaultScene))
2019-01-03 10:08:01 +08:00
} else {
w.Write([]byte("web upload deny"))
}
2017-09-09 16:40:55 +08:00
}
2018-05-10 18:19:04 +08:00
// init prepares the process before main runs: creates the working
// directories, writes a default config on first start, initializes the
// main and access loggers, parses the config file, and wires the static
// file handler for the configured group, then hands off to initComponent.
// NOTE(review): heavy side effects in init() make testing harder; kept
// as-is because main() relies on this ordering.
func init() {
	server.util = util
	for _, folder := range FOLDERS {
		os.Mkdir(folder, 0777)
	}
	flag.Parse()
	// First run: synthesize a config pointing at this machine's public IP.
	if !util.FileExists(CONST_CONF_FILE_NAME) {
		peer := "http://" + util.GetPulicIP() + ":8080"
		cfg := fmt.Sprintf(cfgJson, peer)
		util.WriteFile(CONST_CONF_FILE_NAME, cfg)
	}
	// Main logger is mandatory: a bad log config aborts startup.
	if logger, err := log.LoggerFromConfigAsBytes([]byte(logConfigStr)); err != nil {
		panic(err)
	} else {
		log.ReplaceLogger(logger)
	}
	// Access logger is best-effort: failure is logged but not fatal.
	if _logacc, err := log.LoggerFromConfigAsBytes([]byte(logAccessConfigStr)); err == nil {
		logacc = _logacc
		log.Info("succes init log access")
	} else {
		log.Error(err.Error())
	}
	ParseConfig(CONST_CONF_FILE_NAME)
	// Fall back to the compiled-in queue size when not configured.
	if Config().QueueSize == 0 {
		Config().QueueSize = CONST_QUEUE_SIZE
	}
	staticHandler = http.StripPrefix("/"+Config().Group+"/", http.FileServer(http.Dir(STORE_DIR)))
	initComponent(false)
}
2019-01-16 10:28:51 +08:00
// initComponent normalizes the configured peer list (drops entries that
// contain this host's public IP, prefixes "http://" where missing), and —
// on first call only (is_reload == false) — opens the leveldb database
// and restores persisted statistics into statMap. When is_reload is true,
// only the peer list is refreshed.
func initComponent(is_reload bool) {
	var (
		err   error
		ldb   *leveldb.DB
		ip    string
		stat  map[string]interface{}
		data  []byte
		count int64
	)
	ip = util.GetPulicIP()
	ex, _ := regexp.Compile("\\d+\\.\\d+\\.\\d+\\.\\d+")
	var peers []string
	for _, peer := range Config().Peers {
		// Skip peers whose address contains our own public IP (self).
		if util.Contains(ip, ex.FindAllString(peer, -1)) {
			continue
		}
		if strings.HasPrefix(peer, "http") {
			peers = append(peers, peer)
		} else {
			peers = append(peers, "http://"+peer)
		}
	}
	Config().Peers = peers
	if !is_reload {
		// A failed open is fatal: the server cannot run without its db.
		ldb, err = leveldb.OpenFile(CONST_LEVELDB_FILE_NAME, nil)
		if err != nil {
			log.Error(err)
			panic(err)
		}
		server.ldb = ldb
	}
	// FormatStatInfo restores stat counters persisted as JSON. Numbers
	// come back as float64 from json.Unmarshal, so they are converted to
	// int64 (dropping the fractional part) before being put into statMap.
	FormatStatInfo := func() {
		if util.FileExists(CONST_STAT_FILE_NAME) {
			if data, err = util.ReadBinFile(CONST_STAT_FILE_NAME); err != nil {
				log.Error(err)
			} else {
				if err = json.Unmarshal(data, &stat); err != nil {
					log.Error(err)
				} else {
					for k, v := range stat {
						switch v.(type) {
						case float64:
							vv := strings.Split(fmt.Sprintf("%f", v), ".")[0]
							if count, err = strconv.ParseInt(vv, 10, 64); err != nil {
								log.Error(err)
							} else {
								statMap.Put(k, count)
							}
						default:
							statMap.Put(k, v)
						}
					}
				}
			}
		}
	}
	if !is_reload {
		FormatStatInfo()
	}
	//Timer
}
2018-12-30 18:18:42 +08:00
// HttpHandler is the top-level http.Handler: it wraps the default mux
// with access logging and panic recovery (see its ServeHTTP method).
type HttpHandler struct {
}
2018-05-10 18:19:04 +08:00
2018-12-30 18:18:42 +08:00
// ServeHTTP dispatches every request through http.DefaultServeMux while
// adding two cross-cutting concerns via deferred functions:
//  1. an access-log line (registered first, so it runs last — after the
//     recovery defer has possibly set status_code to 500), and
//  2. panic recovery that answers 500 and logs the stack trace.
func (HttpHandler) ServeHTTP(res http.ResponseWriter, req *http.Request) {
	status_code := "200"
	defer func(t time.Time) {
		logStr := fmt.Sprintf("[Access] %s | %v | %s | %s | %s | %s |%s",
			time.Now().Format("2006/01/02 - 15:04:05"),
			res.Header(),
			time.Since(t).String(),
			util.GetClientIp(req),
			req.Method,
			status_code,
			req.RequestURI,
		)
		logacc.Info(logStr)
	}(time.Now())
	defer func() {
		// recover only works directly inside a deferred function; any
		// panic from the handler below is converted into a 500 here.
		if err := recover(); err != nil {
			status_code = "500"
			res.WriteHeader(500)
			print(err)
			buff := debug.Stack()
			log.Error(err)
			log.Error(string(buff))
		}
	}()
	http.DefaultServeMux.ServeHTTP(res, req)
}
2019-01-16 10:28:51 +08:00
// Main starts the background workers (peer sync loop, stat repair/save,
// peer health checks, replication consumers, optional auto-repair),
// registers all HTTP routes on the default mux, and blocks serving on
// Config().Addr. ListenAndServe only returns on error.
func (this *Server) Main() {
	// Periodic loop: push local files to peers, then prune empty dirs.
	go func() {
		for {
			this.CheckFileAndSendToPeer("", false)
			time.Sleep(time.Second * time.Duration(Config().RefreshInterval))
			util.RemoveEmptyDir(STORE_DIR)
		}
	}()
	// NOTE(review): uses the package-level `server` rather than `this`;
	// they refer to the same instance in this program, but confirm before
	// ever constructing a second Server.
	go server.RepairStat()
	go this.SaveStat()
	go this.Check()
	go this.Consumer()
	if Config().AutoRepair {
		go this.AutoRepair()
	}
	http.HandleFunc("/", this.Index)
	http.HandleFunc("/check_file_exist", this.CheckFileExist)
	http.HandleFunc("/upload", this.Upload)
	http.HandleFunc("/delete", this.RemoveFile)
	http.HandleFunc("/sync", this.Sync)
	http.HandleFunc("/stat", this.Stat)
	http.HandleFunc("/repair_stat", this.RepairStatWeb)
	http.HandleFunc("/status", this.Status)
	http.HandleFunc("/syncfile", this.SyncFile)
	// Download route is rooted at the configured group name.
	http.HandleFunc("/"+Config().Group+"/", this.Download)
	fmt.Println("Listen on " + Config().Addr)
	err := http.ListenAndServe(Config().Addr, new(HttpHandler))
	log.Error(err)
	fmt.Println(err)
}
// main is a thin entry point; all wiring lives in Server.Main, which
// blocks on http.ListenAndServe.
func main() {

	server.Main()

}