diff --git a/plugins/simdht/index.py b/plugins/simdht/index.py
index d390eb7bf..c6b199038 100755
--- a/plugins/simdht/index.py
+++ b/plugins/simdht/index.py
@@ -18,6 +18,9 @@ def getPluginName():
def getPluginDir():
return public.getPluginDir() + '/' + getPluginName()
+sys.path.append(getPluginDir() + "/class")
+import mysql
+import time  # getMinData() below needs time at module scope
+
def getServerDir():
return public.getServerDir() + '/' + getPluginName()
@@ -46,6 +49,13 @@ def getArgs():
return tmp
+def checkArgs(data, ck=[]):
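+    # make sure every required argument listed in ck is present in data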
+ for i in range(len(ck)):
+ if not ck[i] in data:
+ return (False, public.returnJson(False, '参数:(' + ck[i] + ')没有!'))
+ return (True, public.returnJson(True, 'ok'))
+
+
def getInitDTpl():
path = getPluginDir() + "/init.d/" + getPluginName() + ".tpl"
return path
@@ -131,6 +141,77 @@ def reload():
return 'fail'
+def matchData(reg, content):
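+    # return the first capture group of reg matched against content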
+ tmp = re.search(reg, content).groups()
+ return tmp[0]
+
+
+def getDbConfInfo():
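+    # read DB_HOST/DB_USER/DB_PORT/DB_PASS/DB_NAME out of the plugin config file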
+ cfg = getDbConf()
+ content = public.readFile(cfg)
+ data = {}
+ data['DB_HOST'] = matchData("DB_HOST\s*=\s(.*)", content)
+ data['DB_USER'] = matchData("DB_USER\s*=\s(.*)", content)
+ data['DB_PORT'] = matchData("DB_PORT\s*=\s(.*)", content)
+ data['DB_PASS'] = matchData("DB_PASS\s*=\s(.*)", content)
+ data['DB_NAME'] = matchData("DB_NAME\s*=\s(.*)", content)
+ return data
+
+
+def pMysqlDb():
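+    # build a mysql connection object from the parsed config values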
+ data = getDbConfInfo()
+ conn = mysql.mysql()
+ conn.setHost(data['DB_HOST'])
+ conn.setUser(data['DB_USER'])
+ conn.setPwd(data['DB_PASS'])
+ conn.setPort(int(data['DB_PORT']))
+ conn.setDb(data['DB_NAME'])
+ return conn
+
+
+def isSqlError(mysqlMsg):
+    # check for database execution errors and map them to readable messages
+ mysqlMsg = str(mysqlMsg)
+ if "MySQLdb" in mysqlMsg:
+        return public.returnJson(False, 'MySQLdb组件缺失! 进入SSH命令行输入: pip install mysql-python')
+ if "2002," in mysqlMsg:
+ return public.returnJson(False, '数据库连接失败,请检查数据库服务是否启动!')
+ if "using password:" in mysqlMsg:
+ return public.returnJson(False, '数据库管理密码错误!')
+ if "Connection refused" in mysqlMsg:
+ return public.returnJson(False, '数据库连接失败,请检查数据库服务是否启动!')
+ if "1133" in mysqlMsg:
+ return public.returnJson(False, '数据库用户不存在!')
+ if "1007" in mysqlMsg:
+ return public.returnJson(False, '数据库已经存在!')
+ return None
+
+
+def getMinData(conn, min):
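+    # count search_hash rows whose create_time falls within the last "min" seconds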
+ pre = time.strftime("%Y-%m-%d %H:%M:%S",
+ time.localtime(time.time() - min))
+ sql = "select count(*) from search_hash where create_time > '" + pre + "'"
+ data = conn.query(sql)
+ return data[0][0]
+
+
+def getTrendData():
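+    # return crawl counts for the last 1x, 2x and 3x "interval" seconds as a JSON list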
+ import time
+ args = getArgs()
+ data = checkArgs(args, ['interval'])
+ if not data[0]:
+ return data[1]
+ pdb = pMysqlDb()
+ interval = int(args['interval'])
+ result = pdb.execute("show tables")
+ isError = isSqlError(result)
+ if isError:
+ return isError
+ one = getMinData(pdb, interval)
+ two = getMinData(pdb, interval * 2)
+ three = getMinData(pdb, interval * 3)
+ return public.getJson([one, two, three])
+
if __name__ == "__main__":
func = sys.argv[1]
if func == 'status':
@@ -149,5 +230,7 @@ if __name__ == "__main__":
print getDbConf()
elif func == 'get_run_Log':
print getRunLog()
+ elif func == 'get_trend_data':
+ print getTrendData()
else:
print 'error'
diff --git a/plugins/simdht/install.sh b/plugins/simdht/install.sh
index b4503f3c9..c16069fcd 100755
--- a/plugins/simdht/install.sh
+++ b/plugins/simdht/install.sh
@@ -11,6 +11,8 @@ serverPath=$(dirname "$rootPath")
install_tmp=${rootPath}/tmp/bt_install.pl
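+# python dependencies used by the crawler workers (GeoIP lookups, timezone handling)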
+pip install pygeoip
+pip install pytz
Install_dht()
{
diff --git a/plugins/simdht/js/simdht.js b/plugins/simdht/js/simdht.js
index c9487a1ed..450053cfc 100755
--- a/plugins/simdht/js/simdht.js
+++ b/plugins/simdht/js/simdht.js
@@ -1,17 +1,14 @@
-function dhtPost(method,version, args,callback){
- var loadT = layer.msg('正在获取...', { icon: 16, time: 0, shade: 0.3 });
+function dhtPostMin(method, args, callback){
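+    // POST to the simdht plugin backend and hand the parsed JSON response to callback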
var req_data = {};
- req_data['name'] = 'dht';
+ req_data['name'] = 'simdht';
req_data['func'] = method;
- req_data['version'] = version;
if (typeof(args) != 'undefined' && args!=''){
req_data['args'] = JSON.stringify(args);
}
$.post('/plugins/run', req_data, function(data) {
- layer.close(loadT);
if (!data.status){
layer.msg(data.msg,{icon:0,time:2000,shade: [0.3, '#000']});
return;
@@ -22,3 +19,200 @@ function dhtPost(method,version, args,callback){
}
},'json');
}
+
+function dhtPost(method, args, callback){
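+    // same as dhtPostMin(), but shows a loading indicator while the request runs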
+ var loadT = layer.msg('正在获取...', { icon: 16, time: 0, shade: 0.3 });
+ dhtPostMin(method,args,function(data){
+ layer.close(loadT);
+ if(typeof(callback) == 'function'){
+ callback(data);
+ }
+ });
+}
+
+
+function dhtTrend(){
+    // container for the trend chart; markup is assumed, the id must match the
+    // 'dht_trend' element used by echarts.init() in dhtTrendRender()
+    var trend = '<div id="dht_trend" style="height:350px;"></div>';
+ $('.soft-man-con').html(trend);
+ dhtTrendRender();
+}
+
+function dhtTrendData(callback){
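+    // fetch the latest trend counts (5 second interval) from the backend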
+ dhtPostMin('get_trend_data',{interval:5},function(data){
+ if(typeof(callback) == 'function'){
+ callback(data);
+ }
+ });
+}
+
+
+function dhtTrendRender() {
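+    // draw the crawl trend line chart with ECharts and refresh it every 5 seconds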
+ var myChartNetwork = echarts.init(document.getElementById('dht_trend'));
+ var xData = [];
+ var yData = [];
+ var zData = [];
+
+ function getTime() {
+ var now = new Date();
+ var hour = now.getHours();
+ var minute = now.getMinutes();
+ var second = now.getSeconds();
+ if (minute < 10) {
+ minute = "0" + minute;
+ }
+ if (second < 10) {
+ second = "0" + second;
+ }
+ var nowdate = hour + ":" + minute + ":" + second;
+ return nowdate;
+ }
+
+ function ts(m) { return m < 10 ? '0' + m : m }
+
+ function format(sjc) {
+ var time = new Date(sjc);
+ var h = time.getHours();
+ var mm = time.getMinutes();
+ var s = time.getSeconds();
+ return ts(h) + ':' + ts(mm) + ':' + ts(s);
+ }
+
+ function addData(data) {
+ console.log(data);
+ xData.push(getTime());
+ yData.push(data[0]);
+ zData.push(data[1]);
+ // if (shift) {
+ // xData.shift();
+ // yData.shift();
+ // zData.shift();
+ // }
+ }
+ for (var i = 8; i >= 0; i--) {
+ var time = (new Date()).getTime();
+ xData.push(format(time - (i * 3 * 1000)));
+ yData.push(0);
+ zData.push(0);
+ }
+    // chart configuration and data
+ var option = {
+ title: {
+ text: lan.index.interface_net,
+ left: 'center',
+ textStyle: {
+ color: '#888888',
+ fontStyle: 'normal',
+ fontFamily: lan.index.net_font,
+ fontSize: 16,
+ }
+ },
+ tooltip: {
+ trigger: 'axis'
+ },
+ legend: {
+ data: [lan.index.net_up, lan.index.net_down],
+ bottom: '2%'
+ },
+ xAxis: {
+ type: 'category',
+ boundaryGap: false,
+ data: xData,
+ axisLine: {
+ lineStyle: {
+ color: "#666"
+ }
+ }
+ },
+ yAxis: {
+ name: lan.index.unit + 'KB/s',
+ splitLine: {
+ lineStyle: {
+ color: "#eee"
+ }
+ },
+ axisLine: {
+ lineStyle: {
+ color: "#666"
+ }
+ }
+ },
+ series: [{
+ name: lan.index.net_up,
+ type: 'line',
+ data: yData,
+ smooth: true,
+ showSymbol: false,
+ symbol: 'circle',
+ symbolSize: 6,
+ areaStyle: {
+ normal: {
+ color: new echarts.graphic.LinearGradient(0, 0, 0, 1, [{
+ offset: 0,
+ color: 'rgba(255, 140, 0,0.5)'
+ }, {
+ offset: 1,
+ color: 'rgba(255, 140, 0,0.8)'
+ }], false)
+ }
+ },
+ itemStyle: {
+ normal: {
+ color: '#f7b851'
+ }
+ },
+ lineStyle: {
+ normal: {
+ width: 1
+ }
+ }
+ }, {
+ name: lan.index.net_down,
+ type: 'line',
+ data: zData,
+ smooth: true,
+ showSymbol: false,
+ symbol: 'circle',
+ symbolSize: 6,
+ areaStyle: {
+ normal: {
+ color: new echarts.graphic.LinearGradient(0, 0, 0, 1, [{
+ offset: 0,
+ color: 'rgba(30, 144, 255,0.5)'
+ }, {
+ offset: 1,
+ color: 'rgba(30, 144, 255,0.8)'
+ }], false)
+ }
+ },
+ itemStyle: {
+ normal: {
+ color: '#52a9ff'
+ }
+ },
+ lineStyle: {
+ normal: {
+ width: 1
+ }
+ }
+ }]
+ };
+ setInterval(function() {
+ dhtTrendData(function(data){
+ addData(data);
+ });
+ myChartNetwork.setOption({
+ xAxis: {data: xData},
+ series: [
+ {name: '5s',data: yData},
+ {name: '10s',data: zData}
+ ]
+ });
+ }, 5000);
+    // render the chart with the configuration and data defined above
+ myChartNetwork.setOption(option);
+ window.addEventListener("resize", function() {
+ myChartNetwork.resize();
+ });
+}
+
+
diff --git a/plugins/simdht/workers/metadata.py b/plugins/simdht/workers/metadata.py
index 9da270004..d3dc7420d 100755
--- a/plugins/simdht/workers/metadata.py
+++ b/plugins/simdht/workers/metadata.py
@@ -9,10 +9,17 @@ import datetime
import time
import json
+
import metautils
from bencode import bencode, bdecode
geoip = pygeoip.GeoIP('GeoIP.dat')
+# timezone: prefer local (Asia/Shanghai) time over UTC for stored timestamps
+# (pytz.timezone() alone does not change the process timezone; the
+#  utcnow() -> now() changes below are what switch to local time)
+import pytz
+pytz.timezone('Asia/Shanghai')
+# print datetime.datetime.utcnow()
+
+
def decode(encoding, s):
if type(s) is list:
s = ';'.join(s)
@@ -25,11 +32,13 @@ def decode(encoding, s):
pass
return s.decode(encoding, 'ignore')
+
def decode_utf8(encoding, d, i):
- if i+'.utf-8' in d:
- return d[i+'.utf-8'].decode('utf8')
+ if i + '.utf-8' in d:
+ return d[i + '.utf-8'].decode('utf8')
return decode(encoding, d[i])
+
def parse_metadata(data):
info = {}
encoding = 'utf8'
@@ -40,9 +49,10 @@ def parse_metadata(data):
except:
return None
try:
- info['create_time'] = datetime.datetime.fromtimestamp(float(torrent['creation date']))
+ info['create_time'] = datetime.datetime.fromtimestamp(
+ float(torrent['creation date']))
except:
- info['create_time'] = datetime.datetime.utcnow()
+ info['create_time'] = datetime.datetime.now()
if torrent.get('encoding'):
encoding = torrent['encoding']
@@ -58,7 +68,7 @@ def parse_metadata(data):
info['creator'] = decode_utf8(encoding, torrent, 'created by')[:15]
if 'info' in torrent:
- detail = torrent['info']
+ detail = torrent['info']
else:
detail = torrent
info['name'] = decode_utf8(encoding, detail, 'name')
@@ -66,9 +76,11 @@ def parse_metadata(data):
info['files'] = []
for x in detail['files']:
if 'path.utf-8' in x:
- v = {'path': decode(encoding, '/'.join(x['path.utf-8'])), 'length': x['length']}
+ v = {'path': decode(
+ encoding, '/'.join(x['path.utf-8'])), 'length': x['length']}
else:
- v = {'path': decode(encoding, '/'.join(x['path'])), 'length': x['length']}
+ v = {'path': decode(
+ encoding, '/'.join(x['path'])), 'length': x['length']}
if 'filehash' in x:
v['filehash'] = x['filehash'].encode('hex')
info['files'].append(v)
@@ -82,7 +94,7 @@ def parse_metadata(data):
def save_metadata(dbcurr, binhash, address, start_time, data, blacklist):
- utcnow = datetime.datetime.utcnow()
+ utcnow = datetime.datetime.now()
name = threading.currentThread().getName()
try:
info = parse_metadata(data)
@@ -109,34 +121,35 @@ def save_metadata(dbcurr, binhash, address, start_time, data, blacklist):
files = info['files']
else:
files = [{'path': info['name'], 'length': info['length']}]
- files.sort(key=lambda z:z['length'], reverse=True)
+ files.sort(key=lambda z: z['length'], reverse=True)
bigfname = files[0]['path']
info['extension'] = metautils.get_extension(bigfname).lower()
info['category'] = metautils.get_category(info['extension'])
if 'files' in info:
try:
- dbcurr.execute('INSERT INTO search_filelist VALUES(%s, %s)', (info['info_hash'], json.dumps(info['files'])))
+ dbcurr.execute('INSERT INTO search_filelist VALUES(%s, %s)', (info[
+ 'info_hash'], json.dumps(info['files'])))
except:
print name, 'insert error', sys.exc_info()[1]
del info['files']
try:
try:
- print '\n', 'Saved', info['info_hash'], info['name'], (time.time()-start_time), 's', address[0], geoip.country_name_by_addr(address[0]),
+ print '\n', 'Saved', info['info_hash'], info['name'], (time.time() - start_time), 's', address[0], geoip.country_name_by_addr(address[0]),
except:
print '\n', 'Saved', info['info_hash'], sys.exc_info()[1]
try:
- ret = dbcurr.execute('INSERT INTO search_hash(info_hash,category,data_hash,name,extension,classified,source_ip,tagged,' +
- 'length,create_time,last_seen,requests,comment,creator) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)',
- (info['info_hash'], info['category'], info['data_hash'], info['name'], info['extension'], info['classified'],
- info['source_ip'], info['tagged'], info['length'], info['create_time'], info['last_seen'], info['requests'],
- info.get('comment',''), info.get('creator','')))
+ ret = dbcurr.execute('INSERT INTO search_hash(info_hash,category,data_hash,name,extension,classified,source_ip,tagged,' +
+ 'length,create_time,last_seen,requests,comment,creator) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)',
+ (info['info_hash'], info['category'], info['data_hash'], info['name'], info['extension'], info['classified'],
+ info['source_ip'], info['tagged'], info['length'], info[
+ 'create_time'], info['last_seen'], info['requests'],
+ info.get('comment', ''), info.get('creator', '')))
except:
- print 'insert search_hash err: ',info['info_hash']
+ print 'insert search_hash err: ', info['info_hash']
dbcurr.connection.commit()
except:
print name, 'save error', info
traceback.print_exc()
return
-
diff --git a/route/templates/default/index.html b/route/templates/default/index.html
index 5c205fbbe..3f4e5ece2 100755
--- a/route/templates/default/index.html
+++ b/route/templates/default/index.html
@@ -120,8 +120,6 @@