Fixed the size bug, which in some cases displayed the total size instead of the average; plus a few cleanups, commented out the log scale in moduleStats.js, and added a large-number divisor to guard against division by zero.

pull/65/head
Mokaddem 2016-07-29 16:52:50 +02:00
parent a827e8ca3a
commit feb1717dc6
3 changed files with 77 additions and 47 deletions

View File

@ -16,7 +16,6 @@ from packages import Paste
# Config Var
max_set_cardinality = 7
num_day_to_look = 5
def get_date_range(num_day):
curr_date = datetime.date.today()
@ -28,24 +27,24 @@ def get_date_range(num_day):
return date_list
def compute_most_posted(server, message, num_day):
def compute_most_posted(server, message):
module, num, keyword, paste_date = message.split(';')
redis_progression_name_set = 'top_'+ module +'_set'
# Add/Update in Redis
prev_score = server.hget(paste_date, module+'-'+keyword)
if prev_score is not None:
server.hset(paste_date, module+'-'+keyword, int(prev_score) + int(num))
ok = server.hset(paste_date, module+'-'+keyword, int(prev_score) + int(num))
else:
server.hset(paste_date, module+'-'+keyword, int(num))
ok = server.hset(paste_date, module+'-'+keyword, int(num))
# Compute Most Posted
date_range = get_date_range(num_day)
date = get_date_range(0)
# check if this keyword is eligible for progression
keyword_total_sum = 0
for date in date_range:
curr_value = server.hget(date, module+'-'+keyword)
keyword_total_sum += int(curr_value) if curr_value is not None else 0
curr_value = server.hget(date, module+'-'+keyword)
keyword_total_sum += int(curr_value) if curr_value is not None else 0
if keyword in server.smembers(redis_progression_name_set): # if it is already in the set
return
@ -69,7 +68,7 @@ def compute_most_posted(server, message, num_day):
server.sadd(redis_progression_name_set, keyword)
def compute_provider_info(server, path, num_day_to_look):
def compute_provider_info(server, path):
redis_avg_size_name_set = 'top_size_set'
redis_providers_name_set = 'providers_set'
@ -81,48 +80,65 @@ def compute_provider_info(server, path, num_day_to_look):
new_avg = paste_size
# Add/Update in Redis
server.sadd(redis_providers_name_set, paste_provider)
prev_num_paste = server.hget(paste_provider+'_num', paste_date)
if prev_num_paste is not None:
server.hset(paste_provider+'_num', paste_date, int(prev_num_paste)+1)
ok = server.hset(paste_provider+'_num', paste_date, int(prev_num_paste)+1)
prev_sum_size = server.hget(paste_provider+'_size', paste_date)
if prev_sum_size is not None:
server.hset(paste_provider+'_size', paste_date, paste_size)
ok = server.hset(paste_provider+'_size', paste_date, float(prev_sum_size)+paste_size)
new_avg = (float(prev_sum_size)+paste_size) / (int(prev_num_paste)+1)
else:
server.hset(paste_provider+'_size', paste_date, paste_size)
ok = server.hset(paste_provider+'_size', paste_date, paste_size)
else:
server.hset(paste_provider+'_num', paste_date, 1)
ok = server.hset(paste_provider+'_num', paste_date, 1)
#
# Compute Most Posted
# check if this keyword is eligible for progression
#
if paste_provider in server.smembers(redis_avg_size_name_set): # if it is already in the set
return
elif (server.scard(redis_avg_size_name_set) < max_set_cardinality):
server.sadd(redis_avg_size_name_set, paste_provider)
else: #set full capacity
#Check value for all members
member_set = []
for provider in server.smembers(redis_avg_size_name_set):
curr_avg = 0.0
curr_size = server.hget(provider+'_size', paste_date)
curr_num = server.hget(provider+'_num', paste_date)
if (curr_size is not None) and (curr_num is not None):
curr_avg += float(curr_size) / float(curr_num)
member_set.append((provider, curr_avg))
member_set.sort(key=lambda tup: tup[1])
if member_set[0][1] < new_avg:
#remove min from set and add the new one
print 'Adding ' +paste_provider+ '(' +str(new_avg)+') in set and removing '+member_set[0][0]+'('+str(member_set[0][1])+')'
server.srem(redis_avg_size_name_set, member_set[0][0])
# Size
if paste_provider not in server.smembers(redis_avg_size_name_set): # if it is already in the set
if (server.scard(redis_avg_size_name_set) < max_set_cardinality):
server.sadd(redis_avg_size_name_set, paste_provider)
else: #set full capacity
#Check value for all members
member_set = []
for provider in server.smembers(redis_avg_size_name_set):
curr_avg = 0.0
curr_size = server.hget(provider+'_size', paste_date)
curr_num = server.hget(provider+'_num', paste_date)
if (curr_size is not None) and (curr_num is not None):
curr_avg = float(curr_size) / float(curr_num)
member_set.append((provider, curr_avg))
member_set.sort(key=lambda tup: tup[1])
if member_set[0][1] < new_avg:
#remove min from set and add the new one
print 'Size - adding ' +paste_provider+ '(' +str(new_avg)+') in set and removing '+member_set[0][0]+'('+str(member_set[0][1])+')'
server.srem(redis_avg_size_name_set, member_set[0][0])
server.sadd(redis_avg_size_name_set, paste_provider)
# Num
if paste_provider not in server.smembers(redis_providers_name_set): # if it is already in the set
if (server.scard(redis_providers_name_set) < max_set_cardinality):
server.sadd(redis_providers_name_set, paste_provider)
else: #set full capacity
#Check value for all members
member_set = []
for provider in server.smembers(redis_providers_name_set):
curr_num = server.hget(provider+'_num', paste_date)
member_set.append((provider, int(curr_num)))
member_set.sort(key=lambda tup: tup[1])
print '------------------------------'
print member_set
if member_set[0][1] < int(prev_num_paste)+1:
#remove min from set and add the new one
print 'Num - adding ' +paste_provider+ '(' +str(int(prev_num_paste)+1)+') in set and removing '+member_set[0][0]+'('+str(member_set[0][1])+')'
server.srem(redis_providers_name_set, member_set[0][0])
server.sadd(redis_providers_name_set, paste_provider)
if __name__ == '__main__':
# If you wish to use an other port of channel, do not forget to run a subscriber accordingly (see launch_logs.sh)
@ -160,6 +176,6 @@ if __name__ == '__main__':
else:
# Do something with the message from the queue
if len(message.split(';')) > 1:
compute_most_posted(r_serv_trend, message, num_day_to_look)
compute_most_posted(r_serv_trend, message)
else:
compute_provider_info(r_serv_trend, message, num_day_to_look)
compute_provider_info(r_serv_trend, message)

View File

@ -219,8 +219,16 @@ def providersChart():
date_range = get_date_range(num_day)
# Retreive all data from the last num_day
for date in date_range:
curr_value = r_serv_charts.hget(keyword_name+'_'+module_name, date)
bar_values.append([date[0:4]+'/'+date[4:6]+'/'+date[6:8], float(curr_value if curr_value is not None else 0.0)])
curr_value_size = r_serv_charts.hget(keyword_name+'_'+'size', date)
curr_value_num = r_serv_charts.hget(keyword_name+'_'+'num', date)
if module_name == "size":
curr_value_num = curr_value_num if curr_value_num is not None else 0
curr_value_num = curr_value_num if int(curr_value_num) != 0 else 10000000000
curr_value = float(curr_value_size if curr_value_size is not None else 0.0) / float(curr_value_num)
else:
curr_value = float(curr_value_num if curr_value_num is not None else 0.0)
bar_values.append([date[0:4]+'/'+date[4:6]+'/'+date[6:8], curr_value])
bar_values.insert(0, keyword_name)
return jsonify(bar_values)
@ -230,10 +238,16 @@ def providersChart():
# Iterate over element in top_x_set and retreive their value
member_set = []
for keyw in r_serv_charts.smembers(redis_provider_name_set):
redis_provider_name = keyw+'_'+module_name
keyw_value = r_serv_charts.hget(redis_provider_name, get_date_range(0)[0])
keyw_value = keyw_value if keyw_value is not None else 0.0
member_set.append((keyw, float(keyw_value)))
redis_provider_name_size = keyw+'_'+'size'
redis_provider_name_num = keyw+'_'+'num'
keyw_value_size = r_serv_charts.hget(redis_provider_name_size, get_date_range(0)[0])
keyw_value_size = keyw_value_size if keyw_value_size is not None else 0.0
keyw_value_num = r_serv_charts.hget(redis_provider_name_num, get_date_range(0)[0])
keyw_value_num = keyw_value_num if keyw_value_num is not None else 0.0
if module_name == "size":
member_set.append((keyw, float(keyw_value_size)/float(keyw_value_num)))
else:
member_set.append((keyw, float(keyw_value_num)))
member_set.sort(key=lambda tup: tup[1], reverse=True)
if len(member_set) == 0:
member_set.append(("No relevant data", float(100)))

View File

@ -143,7 +143,7 @@ function plot_top_graph(module_name, init){
for(i=1; i<data.length; i++){
var curr_date = data[i][0].split('/');
var offset = (data_other.length/2 - data_other.indexOf(data[0]))*10000000
temp_data_bar.push([new Date(curr_date[0], curr_date[1]-1, curr_date[2]).getTime() + offset, data[i][1]]);
temp_data_bar.push([new Date(curr_date[0], curr_date[1]-1, curr_date[2]).getTime() + offset, data[i][1].toFixed(2)]);
}
// Insert temp_data_bar in order so that color and alignement correspond for the provider graphs
all_other_temp_data.splice(data_other.indexOf(data[0]), 0, [ data[0], temp_data_bar, data_other.indexOf(data[0])]);
@ -172,7 +172,7 @@ function plot_top_graph(module_name, init){
minTickSize: [1, "day"]
},
yaxis: {
transform: function (v) { return v < 1 ? v : Math.log(v); }
//transform: function (v) { return v < 1 ? v : Math.log(v); }
},
grid: { hoverable: true },
legend: { show: true,
@ -210,7 +210,7 @@ function plot_top_graph(module_name, init){
var temp_data_bar = []
for(i=1; i<data.length; i++){
var curr_date = data[i][0].split('/');
temp_data_bar.push([new Date(curr_date[0], curr_date[1]-1, curr_date[2]).getTime(), data[i][1]]);
temp_data_bar.push([new Date(curr_date[0], curr_date[1]-1, curr_date[2]).getTime(), data[i][1].toFixed(2)]);
}
var barData = {
label: involved_item,