合并新版本mesa_fuzzy(SFH)到maat,更节省内存。

This commit is contained in:
zhengchao
2016-04-06 10:41:59 +08:00
parent 72f6e611ed
commit 4a5cd85ecf
16 changed files with 2178 additions and 1048 deletions

View File

@@ -1322,7 +1322,7 @@ inline int REACH_QUERY_THRESH(unsigned long long total_len,unsigned long long ac
// {
// return 1;
// }
if(rate<(unsigned long long)(point_size+QUERY_MIN_RATE))
if(rate>(unsigned long long)(point_size+QUERY_MIN_RATE))
{
return 0;
}

View File

@@ -26,7 +26,7 @@
#include "mesa_fuzzy.h"
#include "great_index_engine.h"
int MAAT_FRAME_VERSION_1_6_20160403=1;
int MAAT_FRAME_VERSION_1_7_20160406=1;
const char *maat_module="MAAT Frame";
const char* CHARSET_STRING[]={"NONE","gbk","big5","unicode","utf8","bin",

View File

@@ -16,7 +16,7 @@ LIBMAAT = libmaatframe.a
LIBMAAT_SO = libmaatframe.so
OBJS=config_monitor.o Maat_rule.o Maat_api.o Maat_stat.o UniversalBoolMatch.o dynamic_array.o cJSON.o\
json2iris.o map_str2int.o interval_index.o great_index_engine.o mesa_fuzzy.o
json2iris.o map_str2int.o interval_index.o great_index_engine.o mesa_fuzzy.o rbtree.o
.c.o:
$(CC) -c $(CFLAGS) -I. $(H_DIR) $<

View File

@@ -1,7 +1,17 @@
#include<stdio.h>
#include<stdlib.h>
#include"interval_index.h"
/*********************************************************************
* File:
* interval_index.c
* Author:
* TangQi
* E-mail:
* tangqi@iie.ac.cn
*********************************************************************/
#include <stdio.h>
#include <stdlib.h>
#include <assert.h>
#include "interval_index.h"
#include "rbtree.h"
#include "rbtree_augmented.h"
/**
* There is a trick here. In order to hide specific
@@ -16,73 +26,113 @@
* Structure of inner segment
**/
typedef struct __IVI_shadow_seg_t{
IVI_seg_t lightseg;
TAILQ_ENTRY(__IVI_shadow_seg_t) ENTRY;
IVI_seg_t lightseg; /* interval for user, including left edge, right edge, and user's data */
struct rb_node rb; /* node of rb-tree */
OFFSET_TYPE max; /* max edge of subtree */
}IVI_shadow_seg_t;
TAILQ_HEAD(TQ, __IVI_shadow_seg_t);
/* Structure of inner InterVal Index */
typedef struct __IVI_shadow_t{
struct TQ ivi_queue;
struct rb_root root;
/* statistics */
int segs_cnt;
OFFSET_TYPE segs_length;
unsigned long long mem_occupy; //do not include user data
}IVI_shadow_t;
/**
* new is closer to head or tail ?
* Return 1 if closer to head than tail
* Else return 0
*/
int closer_to_head(IVI_shadow_seg_t * head, IVI_shadow_seg_t * tail, OFFSET_TYPE target)
IVI_seg_t * IVI_first_seg(IVI_t * handler)
{
if(head == NULL || tail == NULL)
return 1;
S_OFFSET_TYPE tmp1 = (S_OFFSET_TYPE)(target - head->lightseg.left);
S_OFFSET_TYPE tmp2 = (S_OFFSET_TYPE)(target - tail->lightseg.left);
S_OFFSET_TYPE distance_to_head = tmp1 > 0 ? tmp1 : -tmp1;
S_OFFSET_TYPE distance_to_tail = tmp2 > 0 ? tmp2 : -tmp2;
return (distance_to_tail - distance_to_head > 0);
assert(handler != NULL);
IVI_shadow_t * shadow_ivi = (IVI_shadow_t *)handler;
struct rb_node *first_node = rb_first(&(shadow_ivi->root));
if(first_node == NULL)
return NULL;
return (IVI_seg_t *)(rb_entry(first_node, IVI_shadow_seg_t, rb));
}
/* Return the right-most (largest) interval stored in the index,
 * or NULL when the index is empty. */
IVI_seg_t * IVI_last_seg(IVI_t * handler)
{
    assert(handler != NULL);
    IVI_shadow_t *ivi = (IVI_shadow_t *)handler;
    struct rb_node *node = rb_last(&ivi->root);
    return node ? (IVI_seg_t *)rb_entry(node, IVI_shadow_seg_t, rb) : NULL;
}
/* In-order predecessor of 'seg' within the index, or NULL when 'seg'
 * is already the left-most interval. */
IVI_seg_t * IVI_prev_seg(IVI_seg_t * seg)
{
    assert(seg != NULL);
    struct rb_node *node = rb_prev(&((IVI_shadow_seg_t *)seg)->rb);
    return node ? (IVI_seg_t *)rb_entry(node, IVI_shadow_seg_t, rb) : NULL;
}
/* In-order successor of 'seg' within the index, or NULL when 'seg'
 * is already the right-most interval. */
IVI_seg_t * IVI_next_seg(IVI_seg_t * seg)
{
    assert(seg != NULL);
    struct rb_node *node = rb_next(&((IVI_shadow_seg_t *)seg)->rb);
    return node ? (IVI_seg_t *)rb_entry(node, IVI_shadow_seg_t, rb) : NULL;
}
IVI_seg_t * IVI_prev_continuous_seg(IVI_seg_t * seg)
{
if(NULL == seg)
assert(seg != NULL);
IVI_shadow_seg_t * shadow_seg = (IVI_shadow_seg_t *)seg;
struct rb_node * prev_node = rb_prev(&(shadow_seg->rb));
if(prev_node == NULL)
{
return NULL;
}
IVI_shadow_seg_t * _seg = (IVI_shadow_seg_t *)seg;
IVI_shadow_seg_t * prev = TAILQ_PREV(_seg, TQ, ENTRY);
if(NULL == prev)
IVI_seg_t * prev_seg = (IVI_seg_t *)(rb_entry(prev_node, IVI_shadow_seg_t, rb));
if(!continuous(prev_seg->right, seg->left))
{
return NULL;
}
if(continuous((prev->lightseg).right, seg->left))
return (IVI_seg_t *)prev;
return NULL;
return prev_seg;
}
IVI_seg_t * IVI_next_continuous_seg(IVI_seg_t * seg)
{
if(NULL == seg)
assert(seg != NULL);
IVI_shadow_seg_t * shadow_seg = (IVI_shadow_seg_t *)seg;
struct rb_node * next_node = rb_next(&(shadow_seg->rb));
if(next_node == NULL)
{
return NULL;
}
IVI_shadow_seg_t * _seg = (IVI_shadow_seg_t *)seg;
IVI_shadow_seg_t * next = TAILQ_NEXT(_seg, ENTRY);
if(NULL == next)
IVI_seg_t * next_seg = (IVI_seg_t *)(rb_entry(next_node, IVI_shadow_seg_t, rb));
if(!continuous(seg->right, next_seg->left))
{
return NULL;
}
if(continuous(seg->right, (next->lightseg).left))
return (IVI_seg_t *)next;
return NULL;
return next_seg;
}
/* 1 if [left1,right1] and [left2,right2] share at least one point,
 * 0 otherwise.  Disjoint exactly when one interval starts strictly
 * after the other ends (after() comes from interval_index.h). */
static inline int __is_overlapped(OFFSET_TYPE left1, OFFSET_TYPE right1, OFFSET_TYPE left2, OFFSET_TYPE right2)
{
    return (after(left1, right2) || after(left2, right1)) ? 0 : 1;
}
@@ -152,13 +202,42 @@ Relation_t IVI_relative_position(IVI_seg_t * seg1, IVI_seg_t * seg2)
IVI_t * IVI_create(void)
{
IVI_shadow_t * shadow_ivi = (IVI_shadow_t *)malloc(sizeof(IVI_shadow_t));
TAILQ_INIT(&(shadow_ivi->ivi_queue));
shadow_ivi->root = RB_ROOT; //init rb tree's root
shadow_ivi->segs_cnt = 0;
shadow_ivi->segs_length = 0;
shadow_ivi->mem_occupy = sizeof(IVI_shadow_t);
return (IVI_t *)shadow_ivi;
}
/*
 * Post-order free of every node in the subtree rooted at 'root'.
 * For each node the optional callback 'cb' is invoked first so the
 * caller can release the user 'data' payload, then the node's own
 * memory is freed.  Recursive; depth is bounded by tree height.
 */
static void __free_rb_tree(struct rb_node * root, IVI_callback_t cb, void * usr_para)
{
    if(root == NULL)
    {
        return;
    }
    if(root->rb_left != NULL)
    {
        __free_rb_tree(root->rb_left, cb, usr_para);
    }
    if(root->rb_right != NULL)
    {
        __free_rb_tree(root->rb_right, cb, usr_para);
    }
    /* free user data via caller-supplied callback */
    IVI_shadow_seg_t * shadow_seg = rb_entry(root, IVI_shadow_seg_t, rb);
    if(cb != NULL)
    {
        cb((IVI_seg_t *)shadow_seg, usr_para);
    }
    /* free the segment node itself */
    free(shadow_seg);
    shadow_seg = NULL;
    return;
}
/**
* Name:
* IVI_destroy
@@ -177,26 +256,11 @@ void IVI_destroy(IVI_t * handler, IVI_callback_t cb, void * usr_para)
{
return;
}
IVI_shadow_t * shadow_ivi = (IVI_shadow_t *)handler;
IVI_shadow_seg_t * tmpseg = TAILQ_FIRST(&(shadow_ivi->ivi_queue));
IVI_shadow_seg_t * tmp;
/* Free each seg in IVI */
while(tmpseg != NULL)
{
tmp = TAILQ_NEXT(tmpseg, ENTRY);
/* Free *data in seg */
if(NULL != cb)
{
cb(&(tmpseg->lightseg), usr_para);
}
free(tmpseg);
tmpseg = tmp;
}
/* Free IVI */
__free_rb_tree(shadow_ivi->root.rb_node, cb, usr_para);
free(shadow_ivi);
handler = NULL;
return;
}
@@ -224,6 +288,7 @@ IVI_seg_t * IVI_seg_malloc(OFFSET_TYPE left, OFFSET_TYPE right, void * data)
shadow_seg->lightseg.left = left;
shadow_seg->lightseg.right= right;
shadow_seg->lightseg.data = data;
shadow_seg->max = 0;
return (IVI_seg_t *)shadow_seg;
}
@@ -244,6 +309,8 @@ IVI_seg_t * IVI_seg_malloc(OFFSET_TYPE left, OFFSET_TYPE right, void * data)
**/
void IVI_seg_free(IVI_seg_t * seg, IVI_callback_t cb, void * usr_para)
{
assert(seg != NULL);
/* Free user data first */
if(cb != NULL)
{
@@ -257,6 +324,70 @@ void IVI_seg_free(IVI_seg_t * seg, IVI_callback_t cb, void * usr_para)
}
/*
 * Recompute the augmented 'max' for 'node': the largest right endpoint
 * among the node's own interval and the cached maxima of its children.
 * Comparison uses before() from interval_index.h (not plain '<').
 */
static inline OFFSET_TYPE __interval_tree_get_subtree_max(IVI_shadow_seg_t * node)
{
    OFFSET_TYPE max = node->lightseg.right, subtree_max;
    if(node->rb.rb_left)
    {
        subtree_max = (rb_entry(node->rb.rb_left, IVI_shadow_seg_t, rb))->max;
        if(before(max, subtree_max))
            max = subtree_max;
    }
    if(node->rb.rb_right)
    {
        subtree_max = (rb_entry(node->rb.rb_right, IVI_shadow_seg_t, rb))->max;
        if(before(max, subtree_max))
            max = subtree_max;
    }
    return max;
}
/*
 * rb-augment 'propagate' callback: walk from 'rb' up toward 'stop'
 * (exclusive), refreshing each ancestor's cached subtree max.  Stops
 * early once a node's stored max already equals the recomputed value,
 * since every ancestor above it is then unchanged as well.
 */
static void __interval_tree_augment_propagate(struct rb_node * rb, struct rb_node * stop)
{
    while(rb != stop)
    {
        IVI_shadow_seg_t * node = rb_entry(rb, IVI_shadow_seg_t, rb);
        OFFSET_TYPE subtree_max = __interval_tree_get_subtree_max(node);
        if(node->max == subtree_max)
        {
            break;
        }
        node->max = subtree_max;
        rb = rb_parent(&node->rb);
    }
    return;
}
/* rb-augment 'copy' callback: 'rb_new' takes over 'rb_old''s subtree
 * position, so it inherits the cached subtree max verbatim. */
static void __interval_tree_augment_copy(struct rb_node * rb_old, struct rb_node * rb_new)
{
    rb_entry(rb_new, IVI_shadow_seg_t, rb)->max =
        rb_entry(rb_old, IVI_shadow_seg_t, rb)->max;
}
/*
 * rb-augment 'rotate' callback: after a rotation 'rb_new' occupies the
 * subtree position previously held by 'rb_old', so it inherits the old
 * subtree max, while 'rb_old' (now a child) must recompute its max from
 * its own interval and children.  Order matters: copy before recompute.
 */
static void __interval_tree_augment_rotate(struct rb_node * rb_old, struct rb_node * rb_new)
{
    IVI_shadow_seg_t * old = rb_entry(rb_old, IVI_shadow_seg_t, rb);
    IVI_shadow_seg_t * new = rb_entry(rb_new, IVI_shadow_seg_t, rb);
    new->max = old->max;
    old->max = __interval_tree_get_subtree_max(old);
    return;
}
/* Callback set keeping each node's cached subtree 'max' coherent across
 * rb-tree rebalancing; passed to rb_insert_augmented()/rb_erase_augmented(). */
static const struct rb_augment_callbacks __interval_tree_augment_callbacks = {
    __interval_tree_augment_propagate,
    __interval_tree_augment_copy,
    __interval_tree_augment_rotate
};
/**
* Name:
* IVI_insert
@@ -273,74 +404,49 @@ void IVI_seg_free(IVI_seg_t * seg, IVI_callback_t cb, void * usr_para)
**/
int IVI_insert(IVI_t * handler, IVI_seg_t * seg)
{
IVI_shadow_t * shadow_ivi;
IVI_shadow_seg_t *head, *tail, *new_seg, *tmp_seg;
if(NULL == handler || NULL == seg)
{
return -1;
}
shadow_ivi = (IVI_shadow_t *)handler;
new_seg = (IVI_shadow_seg_t *)seg;
head = TAILQ_FIRST(&(shadow_ivi->ivi_queue));
tail = TAILQ_LAST(&(shadow_ivi->ivi_queue), TQ);
if(closer_to_head(head, tail, seg->left))
IVI_shadow_t * shadow_ivi = (IVI_shadow_t *)handler;
struct rb_root * root = &(shadow_ivi->root);
OFFSET_TYPE left = seg->left, right = seg->right;
struct rb_node **link = &root->rb_node, *rb_parent = NULL;
IVI_shadow_seg_t * parent = NULL;
IVI_shadow_seg_t * new_seg = (IVI_shadow_seg_t *)seg;
while(*link)
{
TAILQ_FOREACH(tmp_seg, &(shadow_ivi->ivi_queue), ENTRY)
{
/* Find the first seg whose left is bigger than given seg's right, we will insert new seg before it */
if(after(tmp_seg->lightseg.left, new_seg->lightseg.right))
{
TAILQ_INSERT_BEFORE(tmp_seg, new_seg, ENTRY);
shadow_ivi->segs_cnt ++;
shadow_ivi->segs_length += (seg->right - seg->left + 1);
return 0;
}
else if(before(tmp_seg->lightseg.right, new_seg->lightseg.left))
{
continue;
}
else /* Overlap */
rb_parent = *link;
parent = rb_entry(rb_parent, IVI_shadow_seg_t, rb);
/* is overlapped */
if(__is_overlapped(left, right, parent->lightseg.left, parent->lightseg.right))
{
//overlapped, return
return -1;
}
}
/* If have searched to the end of list, we will inset it to the tail */
TAILQ_INSERT_TAIL(&(shadow_ivi->ivi_queue), new_seg, ENTRY);
shadow_ivi->segs_cnt ++;
shadow_ivi->segs_length += (seg->right - seg->left + 1);
if(before(parent->max, right))
{
parent->max = right;
}
if(before(left, parent->lightseg.left))
{
link = &parent->rb.rb_left;
}
else
{
TAILQ_FOREACH_REVERSE(tmp_seg, &(shadow_ivi->ivi_queue), TQ, ENTRY)
{
/* Find the first seg whose right is smaller than given seg's left, we will insert new seg after it */
if(before(tmp_seg->lightseg.right, new_seg->lightseg.left))
{
TAILQ_INSERT_AFTER(&(shadow_ivi->ivi_queue), tmp_seg, new_seg, ENTRY);
shadow_ivi->segs_cnt ++;
shadow_ivi->segs_length += (seg->right - seg->left + 1);
return 0;
}
else if(after(tmp_seg->lightseg.left, new_seg->lightseg.right))
{
continue;
}
else /* Overlap */
{
return -1;
link = &parent->rb.rb_right;
}
}
new_seg->max = right;
rb_link_node(&new_seg->rb, rb_parent, link);
rb_insert_augmented(&new_seg->rb, root, &__interval_tree_augment_callbacks);
/* If have searched to the head of list, we will inset it to the head */
TAILQ_INSERT_HEAD(&(shadow_ivi->ivi_queue), new_seg, ENTRY);
/* update statistics */
shadow_ivi->segs_cnt ++;
shadow_ivi->segs_length += (seg->right - seg->left + 1);
}
shadow_ivi->segs_length += seg->right - seg->left + 1;
shadow_ivi->mem_occupy += sizeof(IVI_shadow_seg_t);
return 0;
}
@@ -367,16 +473,56 @@ int IVI_remove(IVI_t * handler, IVI_seg_t * seg)
}
IVI_shadow_t * shadow_ivi = (IVI_shadow_t *)handler;
IVI_shadow_seg_t * shadow_seg = (IVI_shadow_seg_t *)seg;
struct rb_root * root = &(shadow_ivi->root);
IVI_shadow_seg_t * new_seg = (IVI_shadow_seg_t *)seg;
rb_erase_augmented(&new_seg->rb, root, &__interval_tree_augment_callbacks);
TAILQ_REMOVE(&(shadow_ivi->ivi_queue), shadow_seg, ENTRY);
/* update statistics */
shadow_ivi->segs_cnt --;
shadow_ivi->segs_length -= (seg->right - seg->left + 1);
shadow_ivi->segs_length -= seg->right - seg->left + 1;
shadow_ivi->mem_occupy -= sizeof(IVI_shadow_seg_t);
return 0;
}
/*
 * Classic augmented interval-tree search: return the left-most node in
 * the subtree rooted at 'node' whose interval overlaps [left, right],
 * or NULL when no such node exists.  The left subtree is explored only
 * when its cached 'max' endpoint can still reach 'left'; when it was
 * explored and produced no match, anything to the right of 'node'
 * starts even later, so the search does not descend right.
 */
static struct rb_node * __min_interval_search_from(struct rb_node * node, OFFSET_TYPE left, OFFSET_TYPE right)
{
    if(node == NULL)
    {
        return NULL;
    }
    IVI_shadow_seg_t * seg = rb_entry(node, IVI_shadow_seg_t, rb);
    /* Fix: only form the container_of pointer for the left child after
     * checking it is non-NULL; the original computed rb_entry() on a
     * possibly-NULL rb_left unconditionally (invalid pointer arithmetic,
     * even though it was never dereferenced when NULL). */
    if(node->rb_left != NULL)
    {
        IVI_shadow_seg_t * left_seg = rb_entry(node->rb_left, IVI_shadow_seg_t, rb);
        if(!before(left_seg->max, left))
        {
            struct rb_node * ret = __min_interval_search_from(node->rb_left, left, right);
            if(ret != NULL)
            {
                return ret;
            }
            /* Left subtree had no match: either this node overlaps or
             * nothing on this path does. */
            if(__is_overlapped(left, right, seg->lightseg.left, seg->lightseg.right))
            {
                return node;
            }
            return NULL;
        }
    }
    if(__is_overlapped(left, right, seg->lightseg.left, seg->lightseg.right))
    {
        return node;
    }
    return __min_interval_search_from(node->rb_right, left, right);
}
/**
* Name:
* IVI_query
@@ -397,115 +543,34 @@ int IVI_remove(IVI_t * handler, IVI_seg_t * seg)
**/
int IVI_query(IVI_t * handler, OFFSET_TYPE left, OFFSET_TYPE right, IVI_seg_t *** segs)
{
IVI_shadow_t * shadow_ivi;
IVI_shadow_seg_t *head, *tail, *tmp, *left_tmp, *right_tmp;
int interval_cnt = 0, i;
if(NULL == handler || after(left, right))
{
//argument error: NULL handler or inverted range
return -1;
}
shadow_ivi = (IVI_shadow_t *)handler;
head = TAILQ_FIRST(&(shadow_ivi->ivi_queue));
tail = TAILQ_LAST(&(shadow_ivi->ivi_queue), TQ);
int interval_cnt = 0, max_cnt = 8;
IVI_shadow_t * shadow_ivi = (IVI_shadow_t *)handler;
struct rb_node * root = shadow_ivi->root.rb_node;
struct rb_node * min_overlap = __min_interval_search_from(root, left, right);
struct rb_node * tmp_node = min_overlap;
/* Traverse from head or tail? We need to decide */
if(closer_to_head(head, tail, left))
*segs = (IVI_seg_t **)malloc(max_cnt * sizeof(IVI_seg_t *));
while (tmp_node != NULL)
{
tmp = head;
while(tmp != NULL)
IVI_seg_t * tmp_seg = (IVI_seg_t *)(rb_entry(tmp_node, IVI_shadow_seg_t, rb));
if(!__is_overlapped(tmp_seg->left, tmp_seg->right, left, right))
{
if(after(left, tmp->lightseg.right))
{
tmp = TAILQ_NEXT(tmp, ENTRY);
}
else
{
/* Get the seg which left is in or before*/
left_tmp = tmp;
break;
}
if(interval_cnt > max_cnt)
{
max_cnt *= 2;
*segs = (IVI_seg_t **)realloc(*segs, max_cnt * sizeof(IVI_seg_t *));
}
if(tmp == NULL)
{
*segs = NULL;
return 0;
}
/* Get the num of overlapped segs */
while(tmp != NULL)
{
if(!before(right, tmp->lightseg.left))
{
tmp = TAILQ_NEXT(tmp, ENTRY);
(*segs)[interval_cnt] = tmp_seg;
interval_cnt ++;
}
else
{
break;
}
}
tmp = left_tmp;
if(interval_cnt == 0)
{
*segs = NULL;
return 0;
}
*segs = (IVI_seg_t **)malloc(interval_cnt * sizeof(IVI_seg_t *));
for(i = 0; i < interval_cnt; i++)
{
(*segs)[i] = (IVI_seg_t *)tmp;
tmp = TAILQ_NEXT(tmp, ENTRY);
}
}
else
{
tmp = tail;
while(tmp != NULL)
{
if(before(right, tmp->lightseg.left))
{
tmp = TAILQ_PREV(tmp, TQ, ENTRY);
}
else
{
right_tmp = tmp;
break;
}
}
if(tmp == NULL)
{
*segs = NULL;
return 0;
}
/* Get the num of overlapped segs */
while(tmp != NULL)
{
if(!after(left, tmp->lightseg.right))
{
tmp = TAILQ_PREV(tmp, TQ, ENTRY);
interval_cnt ++;
}
else
{
break;
}
}
tmp = right_tmp;
if(interval_cnt == 0)
{
*segs = NULL;
return 0;
}
*segs = (IVI_seg_t **)malloc(interval_cnt * sizeof(IVI_seg_t *));
for(i = interval_cnt - 1; i >= 0; i--)
{
(*segs)[i] = (IVI_seg_t *)tmp;
tmp = TAILQ_PREV(tmp, TQ, ENTRY);
}
tmp_node = rb_next(tmp_node);
}
return interval_cnt;
}
@@ -531,129 +596,41 @@ int IVI_query(IVI_t * handler, OFFSET_TYPE left, OFFSET_TYPE right, IVI_seg_t **
**/
int IVI_query_continuous(IVI_t * handler, OFFSET_TYPE left, OFFSET_TYPE right, IVI_seg_t *** segs)
{
IVI_shadow_t * shadow_ivi;
IVI_shadow_seg_t *head, *tail, *tmp, *left_tmp, *right_tmp;
int interval_cnt = 0, i;
if(NULL == handler || after(left, right))
{
//argument error: NULL handler or inverted range
return -1;
}
shadow_ivi = (IVI_shadow_t *)handler;
head = TAILQ_FIRST(&(shadow_ivi->ivi_queue));
tail = TAILQ_LAST(&(shadow_ivi->ivi_queue), TQ);
int interval_cnt = 0, max_cnt = 8;
IVI_shadow_t * shadow_ivi = (IVI_shadow_t *)handler;
struct rb_node * root = shadow_ivi->root.rb_node;
struct rb_node * min_overlap = __min_interval_search_from(root, left, right);
struct rb_node * tmp_node = min_overlap;
/* Traverse from head or tail? We need to decide */
if(closer_to_head(head, tail, left))
*segs = (IVI_seg_t **)malloc(max_cnt * sizeof(IVI_seg_t *));
while (tmp_node != NULL)
{
tmp = head;
while(tmp != NULL)
IVI_seg_t * tmp_seg = (IVI_seg_t *)(rb_entry(tmp_node, IVI_shadow_seg_t, rb));
if(!__is_overlapped(tmp_seg->left, tmp_seg->right, left, right))
{
if(after(left, tmp->lightseg.right))
{
tmp = TAILQ_NEXT(tmp, ENTRY);
}
else
{
/* Get the seg which left is in or before*/
left_tmp = tmp;
break;
}
if(interval_cnt > max_cnt)
{
max_cnt += 8;
*segs = (IVI_seg_t **)realloc(*segs, max_cnt * sizeof(IVI_seg_t *));
}
if(tmp == NULL)
{
*segs = NULL;
return 0;
}
/* Get the num of overlapped segs */
while(tmp != NULL)
{
if(!before(right, tmp->lightseg.left))
{
tmp = TAILQ_NEXT(tmp, ENTRY);
(*segs)[interval_cnt] = tmp_seg;
interval_cnt ++;
}
else
{
break;
}
IVI_shadow_seg_t * prev = TAILQ_PREV(tmp, TQ, ENTRY);
if(tmp != NULL && !continuous(prev->lightseg.right, tmp->lightseg.left))
tmp_node = rb_next(tmp_node);
IVI_seg_t * prev_tmp_seg = tmp_seg;
tmp_seg = (IVI_seg_t *)(rb_entry(tmp_node, IVI_shadow_seg_t, rb));
if(!continuous(prev_tmp_seg->right, tmp_seg->left))
{
break;
}
}
tmp = left_tmp;
if(interval_cnt == 0)
{
*segs = NULL;
return 0;
}
*segs = (IVI_seg_t **)malloc(interval_cnt * sizeof(IVI_seg_t *));
for(i = 0; i < interval_cnt; i++)
{
(*segs)[i] = (IVI_seg_t *)tmp;
tmp = TAILQ_NEXT(tmp, ENTRY);
}
}
else
{
tmp = tail;
while(tmp != NULL)
{
if(before(right, tmp->lightseg.left))
{
tmp = TAILQ_PREV(tmp, TQ, ENTRY);
}
else
{
right_tmp = tmp;
break;
}
}
if(tmp == NULL)
{
*segs = NULL;
return 0;
}
/* Get the num of overlapped segs */
while(tmp != NULL)
{
if(!after(left, tmp->lightseg.right))
{
tmp = TAILQ_PREV(tmp, TQ, ENTRY);
interval_cnt ++;
}
else
{
break;
}
IVI_shadow_seg_t * next = TAILQ_NEXT(tmp, ENTRY);
if(tmp != NULL && !continuous(tmp->lightseg.right, next->lightseg.left))
{
break;
}
}
tmp = right_tmp;
if(interval_cnt == 0)
{
*segs = NULL;
return 0;
}
*segs = (IVI_seg_t **)malloc(interval_cnt * sizeof(IVI_seg_t *));
for(i = interval_cnt - 1; i >= 0; i--)
{
(*segs)[i] = (IVI_seg_t *)tmp;
tmp = TAILQ_PREV(tmp, TQ, ENTRY);
}
}
return interval_cnt;
}
@@ -698,6 +675,41 @@ OFFSET_TYPE IVI_seg_length(IVI_t * handler)
}
/**
* Name:
* IVI_mem_occupy
* Description:
* Get the memory occupy of given interval index handler
* Params:
* handler: The handler of InterVal Index created by IVI_create.
* Return:
* Return the memory occupy of given interval index handler
**/
/* Bookkeeping accessor: bytes of index-internal memory tracked for
 * 'handler' (user data payloads are not included).  0 for NULL. */
unsigned long long IVI_mem_occupy(IVI_t * handler)
{
    if(handler == NULL)
    {
        return 0;
    }
    return ((IVI_shadow_t *)handler)->mem_occupy;
}
/*
 * In-order (left, node, right) walk applying 'cb' to every segment.
 * Both child pointers are captured before 'cb' runs so the walk stays
 * valid even if the callback frees the current node.  NOTE(review):
 * this only protects against freeing the node passed to 'cb'; a
 * callback that removes other nodes or rebalances the tree would still
 * leave the saved pointers dangling — confirm callers respect this.
 */
static void __inorder_traverse(struct rb_node * root, IVI_callback_t cb, void * usr_para)
{
    if(root == NULL)
    {
        return;
    }
    /* save first in case root is freed in the callback */
    struct rb_node * left_node = root->rb_left;
    struct rb_node * right_node = root->rb_right;
    __inorder_traverse(left_node, cb, usr_para);
    IVI_seg_t * seg = (IVI_seg_t *)(rb_entry(root, IVI_shadow_seg_t, rb));
    cb(seg, usr_para);
    __inorder_traverse(right_node, cb, usr_para);
    return;
}
/**
* Name:
@@ -720,17 +732,6 @@ void IVI_traverse(IVI_t * handler, IVI_callback_t cb, void * usr_para)
}
IVI_shadow_t * shadow_ivi = (IVI_shadow_t *)handler;
IVI_shadow_seg_t * tmp_seg = TAILQ_FIRST(&(shadow_ivi->ivi_queue));
IVI_shadow_seg_t * tmp;
/* Traverse the IVI */
while(tmp_seg != NULL)
{
/*
* Here we can't use TAILQ_FOREACH because we
* do not know what the callback function does.
* */
tmp = TAILQ_NEXT(tmp_seg, ENTRY);
cb((IVI_seg_t *)tmp_seg, usr_para);
tmp_seg = tmp;
}
__inorder_traverse(shadow_ivi->root.rb_node, cb, usr_para);
return;
}

View File

@@ -6,7 +6,7 @@
* (3) The interval supports rollback.
*
* author: zhengchao@iie.ac.cn tangqi@iie.ac.cn
* last modify time: 2015-08-29
* last modify time: 2015-12-04
*************************************************************************/
#ifndef _INTERVAL_INDEX_H_
@@ -16,7 +16,6 @@
extern "C"{
#endif
#include "queue.h"
#define SIZE_8
@@ -61,6 +60,10 @@ static inline int continuous(OFFSET_TYPE prev, OFFSET_TYPE next)
}
IVI_seg_t * IVI_first_seg(IVI_t * handler);
IVI_seg_t * IVI_last_seg(IVI_t * handler);
IVI_seg_t * IVI_prev_seg(IVI_seg_t * seg);
IVI_seg_t * IVI_next_seg(IVI_seg_t * seg);
IVI_seg_t * IVI_prev_continuous_seg(IVI_seg_t * seg);
IVI_seg_t * IVI_next_continuous_seg(IVI_seg_t * seg);
@@ -273,6 +276,18 @@ int IVI_seg_cnt(IVI_t * handler);
OFFSET_TYPE IVI_seg_length(IVI_t * handler);
/**
* Name:
* IVI_mem_occupy
* Description:
* Get the memory occupy of given interval index handler
* Params:
* handler: The handler of InterVal Index created by IVI_create.
* Return:
* Return the memory occupy of given interval index handler
**/
unsigned long long IVI_mem_occupy(IVI_t * handler);
/**
* Name:

File diff suppressed because it is too large Load Diff

548
src/entry/rbtree.c Normal file
View File

@@ -0,0 +1,548 @@
/*
Red Black Trees
(C) 1999 Andrea Arcangeli <andrea@suse.de>
(C) 2002 David Woodhouse <dwmw2@infradead.org>
(C) 2012 Michel Lespinasse <walken@google.com>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
linux/lib/rbtree.c
*/
#include "rbtree.h"
#include "rbtree_augmented.h"
/*
* red-black trees properties: http://en.wikipedia.org/wiki/Rbtree
*
* 1) A node is either red or black
* 2) The root is black
* 3) All leaves (NULL) are black
* 4) Both children of every red node are black
* 5) Every simple path from root to leaves contains the same number
* of black nodes.
*
* 4 and 5 give the O(log n) guarantee, since 4 implies you cannot have two
* consecutive red nodes in a path and every red node is therefore followed by
* a black. So if B is the number of black nodes on every simple path (as per
* 5), then the longest possible path due to 4 is 2B.
*
* We shall indicate color with case, where black nodes are uppercase and red
* nodes will be lowercase. Unknown color nodes shall be drawn as red within
* parentheses and have some accompanying text comment.
*/
/* Force the node's color bit to black, leaving the packed parent
 * pointer in __rb_parent_color untouched. */
static inline void rb_set_black(struct rb_node *rb)
{
	rb->__rb_parent_color |= RB_BLACK;
}
/* Fast parent fetch valid ONLY for red nodes: a red node carries no
 * color bit in __rb_parent_color, so the word is the parent pointer
 * itself and no masking is needed (cf. rb_parent() in rbtree.h). */
static inline struct rb_node *rb_red_parent(struct rb_node *red)
{
	return (struct rb_node *)red->__rb_parent_color;
}
/*
* Helper function for rotations:
* - old's parent and color get assigned to new
* - old gets assigned new as a parent and 'color' as a color.
*/
static inline void
__rb_rotate_set_parents(struct rb_node *old, struct rb_node *new,
			struct rb_root *root, int color)
{
	struct rb_node *parent = rb_parent(old);
	/* new inherits old's parent pointer and color in one word copy */
	new->__rb_parent_color = old->__rb_parent_color;
	rb_set_parent_color(old, new, color);
	/* re-point the grandparent's (or root's) child link at 'new' */
	__rb_change_child(old, new, parent, root);
}
/*
 * Bottom-up rebalance after linking a new RED node 'node' with
 * rb_link_node().  'augment_rotate' is called at every rotation so
 * augmented trees (e.g. interval trees) can fix cached per-subtree
 * data; for plain trees a dummy callback is inlined away.
 */
static __always_inline void
__rb_insert(struct rb_node *node, struct rb_root *root,
	    void (*augment_rotate)(struct rb_node *old, struct rb_node *new))
{
	struct rb_node *parent = rb_red_parent(node), *gparent, *tmp;

	while (1) {
		/*
		 * Loop invariant: node is red
		 *
		 * If there is a black parent, we are done.
		 * Otherwise, take some corrective action as we don't
		 * want a red root or two consecutive red nodes.
		 */
		if (!parent) {
			rb_set_parent_color(node, NULL, RB_BLACK);
			break;
		} else if (rb_is_black(parent))
			break;

		gparent = rb_red_parent(parent);

		tmp = gparent->rb_right;
		if (parent != tmp) {	/* parent == gparent->rb_left */
			if (tmp && rb_is_red(tmp)) {
				/*
				 * Case 1 - color flips
				 *
				 *       G            g
				 *      / \          / \
				 *     p   u  -->   P   U
				 *    /            /
				 *   n            n
				 *
				 * However, since g's parent might be red, and
				 * 4) does not allow this, we need to recurse
				 * at g.
				 */
				rb_set_parent_color(tmp, gparent, RB_BLACK);
				rb_set_parent_color(parent, gparent, RB_BLACK);
				node = gparent;
				parent = rb_parent(node);
				rb_set_parent_color(node, parent, RB_RED);
				continue;
			}

			tmp = parent->rb_right;
			if (node == tmp) {
				/*
				 * Case 2 - left rotate at parent
				 *
				 *      G             G
				 *     / \           / \
				 *    p   U  -->    n   U
				 *     \           /
				 *      n         p
				 *
				 * This still leaves us in violation of 4), the
				 * continuation into Case 3 will fix that.
				 */
				parent->rb_right = tmp = node->rb_left;
				node->rb_left = parent;
				if (tmp)
					rb_set_parent_color(tmp, parent,
							    RB_BLACK);
				rb_set_parent_color(parent, node, RB_RED);
				augment_rotate(parent, node);
				parent = node;
				tmp = node->rb_right;
			}

			/*
			 * Case 3 - right rotate at gparent
			 *
			 *        G           P
			 *       / \         / \
			 *      p   U  -->  n   g
			 *     /                 \
			 *    n                   U
			 */
			gparent->rb_left = tmp;	/* == parent->rb_right */
			parent->rb_right = gparent;
			if (tmp)
				rb_set_parent_color(tmp, gparent, RB_BLACK);
			__rb_rotate_set_parents(gparent, parent, root, RB_RED);
			augment_rotate(gparent, parent);
			break;
		} else {
			/* mirror image: parent == gparent->rb_right */
			tmp = gparent->rb_left;
			if (tmp && rb_is_red(tmp)) {
				/* Case 1 - color flips */
				rb_set_parent_color(tmp, gparent, RB_BLACK);
				rb_set_parent_color(parent, gparent, RB_BLACK);
				node = gparent;
				parent = rb_parent(node);
				rb_set_parent_color(node, parent, RB_RED);
				continue;
			}

			tmp = parent->rb_left;
			if (node == tmp) {
				/* Case 2 - right rotate at parent */
				parent->rb_left = tmp = node->rb_right;
				node->rb_right = parent;
				if (tmp)
					rb_set_parent_color(tmp, parent,
							    RB_BLACK);
				rb_set_parent_color(parent, node, RB_RED);
				augment_rotate(parent, node);
				parent = node;
				tmp = node->rb_left;
			}

			/* Case 3 - left rotate at gparent */
			gparent->rb_right = tmp;	/* == parent->rb_left */
			parent->rb_left = gparent;
			if (tmp)
				rb_set_parent_color(tmp, gparent, RB_BLACK);
			__rb_rotate_set_parents(gparent, parent, root, RB_RED);
			augment_rotate(gparent, parent);
			break;
		}
	}
}
/*
* Inline version for rb_erase() use - we want to be able to inline
* and eliminate the dummy_rotate callback there
*/
/*
 * Rebalance after an erase left the subtree under 'parent' one black
 * node short.  Always-inline so rb_erase() can eliminate the dummy
 * 'augment_rotate' callback entirely.
 */
static __always_inline void
____rb_erase_color(struct rb_node *parent, struct rb_root *root,
		   void (*augment_rotate)(struct rb_node *old, struct rb_node *new))
{
	struct rb_node *node = NULL, *sibling, *tmp1, *tmp2;

	while (1) {
		/*
		 * Loop invariants:
		 * - node is black (or NULL on first iteration)
		 * - node is not the root (parent is not NULL)
		 * - All leaf paths going through parent and node have a
		 *   black node count that is 1 lower than other leaf paths.
		 */
		sibling = parent->rb_right;
		if (node != sibling) {	/* node == parent->rb_left */
			if (rb_is_red(sibling)) {
				/*
				 * Case 1 - left rotate at parent
				 *
				 *     P               S
				 *    / \             / \
				 *   N   s    -->    p   Sr
				 *      / \         / \
				 *     Sl  Sr      N   Sl
				 */
				parent->rb_right = tmp1 = sibling->rb_left;
				sibling->rb_left = parent;
				rb_set_parent_color(tmp1, parent, RB_BLACK);
				__rb_rotate_set_parents(parent, sibling, root,
							RB_RED);
				augment_rotate(parent, sibling);
				sibling = tmp1;
			}
			tmp1 = sibling->rb_right;
			if (!tmp1 || rb_is_black(tmp1)) {
				tmp2 = sibling->rb_left;
				if (!tmp2 || rb_is_black(tmp2)) {
					/*
					 * Case 2 - sibling color flip
					 * (p could be either color here)
					 *
					 *    (p)           (p)
					 *    / \           / \
					 *   N   S    -->  N   s
					 *      / \           / \
					 *     Sl  Sr        Sl  Sr
					 *
					 * This leaves us violating 5) which
					 * can be fixed by flipping p to black
					 * if it was red, or by recursing at p.
					 * p is red when coming from Case 1.
					 */
					rb_set_parent_color(sibling, parent,
							    RB_RED);
					if (rb_is_red(parent))
						rb_set_black(parent);
					else {
						node = parent;
						parent = rb_parent(node);
						if (parent)
							continue;
					}
					break;
				}
				/*
				 * Case 3 - right rotate at sibling
				 * (p could be either color here)
				 *
				 *   (p)           (p)
				 *   / \           / \
				 *  N   S    -->  N   Sl
				 *     / \             \
				 *    sl  Sr            s
				 *                       \
				 *                        Sr
				 */
				sibling->rb_left = tmp1 = tmp2->rb_right;
				tmp2->rb_right = sibling;
				parent->rb_right = tmp2;
				if (tmp1)
					rb_set_parent_color(tmp1, sibling,
							    RB_BLACK);
				augment_rotate(sibling, tmp2);
				tmp1 = sibling;
				sibling = tmp2;
			}
			/*
			 * Case 4 - left rotate at parent + color flips
			 * (p and sl could be either color here.
			 *  After rotation, p becomes black, s acquires
			 *  p's color, and sl keeps its color)
			 *
			 *      (p)             (s)
			 *      / \             / \
			 *     N   S     -->   P   Sr
			 *        / \         / \
			 *      (sl) sr      N  (sl)
			 */
			parent->rb_right = tmp2 = sibling->rb_left;
			sibling->rb_left = parent;
			rb_set_parent_color(tmp1, sibling, RB_BLACK);
			if (tmp2)
				rb_set_parent(tmp2, parent);
			__rb_rotate_set_parents(parent, sibling, root,
						RB_BLACK);
			augment_rotate(parent, sibling);
			break;
		} else {
			/* mirror image: node == parent->rb_right */
			sibling = parent->rb_left;
			if (rb_is_red(sibling)) {
				/* Case 1 - right rotate at parent */
				parent->rb_left = tmp1 = sibling->rb_right;
				sibling->rb_right = parent;
				rb_set_parent_color(tmp1, parent, RB_BLACK);
				__rb_rotate_set_parents(parent, sibling, root,
							RB_RED);
				augment_rotate(parent, sibling);
				sibling = tmp1;
			}
			tmp1 = sibling->rb_left;
			if (!tmp1 || rb_is_black(tmp1)) {
				tmp2 = sibling->rb_right;
				if (!tmp2 || rb_is_black(tmp2)) {
					/* Case 2 - sibling color flip */
					rb_set_parent_color(sibling, parent,
							    RB_RED);
					if (rb_is_red(parent))
						rb_set_black(parent);
					else {
						node = parent;
						parent = rb_parent(node);
						if (parent)
							continue;
					}
					break;
				}
				/* Case 3 - left rotate at sibling */
				sibling->rb_right = tmp1 = tmp2->rb_left;
				tmp2->rb_left = sibling;
				parent->rb_left = tmp2;
				if (tmp1)
					rb_set_parent_color(tmp1, sibling,
							    RB_BLACK);
				augment_rotate(sibling, tmp2);
				tmp1 = sibling;
				sibling = tmp2;
			}
			/* Case 4 - right rotate at parent + color flips */
			parent->rb_left = tmp2 = sibling->rb_right;
			sibling->rb_right = parent;
			rb_set_parent_color(tmp1, sibling, RB_BLACK);
			if (tmp2)
				rb_set_parent(tmp2, parent);
			__rb_rotate_set_parents(parent, sibling, root,
						RB_BLACK);
			augment_rotate(parent, sibling);
			break;
		}
	}
}
/* Non-inline version for rb_erase_augmented() use */
/* Non-inline entry point used by rb_erase_augmented() callers (see
 * rbtree_augmented.h); delegates to the always-inline worker. */
void __rb_erase_color(struct rb_node *parent, struct rb_root *root,
	void (*augment_rotate)(struct rb_node *old, struct rb_node *new))
{
	____rb_erase_color(parent, root, augment_rotate);
}
/*
* Non-augmented rbtree manipulation functions.
*
* We use dummy augmented callbacks here, and have the compiler optimize them
* out of the rb_insert_color() and rb_erase() function definitions.
*/
/* No-op augment callbacks; the optimizer inlines and removes them from
 * the non-augmented rb_insert_color()/rb_erase() paths. */
static inline void dummy_propagate(struct rb_node *node, struct rb_node *stop) {}
static inline void dummy_copy(struct rb_node *old, struct rb_node *new) {}
static inline void dummy_rotate(struct rb_node *old, struct rb_node *new) {}
static const struct rb_augment_callbacks dummy_callbacks = {
	dummy_propagate, dummy_copy, dummy_rotate
};
/* Public insert rebalance for plain (non-augmented) trees: call after
 * rb_link_node() has attached 'node' as a red leaf. */
void rb_insert_color(struct rb_node *node, struct rb_root *root)
{
	__rb_insert(node, root, dummy_rotate);
}
/* Unlink 'node' from the tree and, if the removal left a black-height
 * deficit, rebalance from the node __rb_erase_augmented() reports. */
void rb_erase(struct rb_node *node, struct rb_root *root)
{
	struct rb_node *rebalance;
	rebalance = __rb_erase_augmented(node, root, &dummy_callbacks);
	if (rebalance)
		____rb_erase_color(rebalance, root, dummy_rotate);
}
/*
* Augmented rbtree manipulation functions.
*
* This instantiates the same __always_inline functions as in the non-augmented
* case, but this time with user-defined callbacks.
*/
/* Augmented insert rebalance: same algorithm as rb_insert_color() but
 * invoking the caller's rotate callback at every rotation. */
void __rb_insert_augmented(struct rb_node *node, struct rb_root *root,
	void (*augment_rotate)(struct rb_node *old, struct rb_node *new))
{
	__rb_insert(node, root, augment_rotate);
}
/*
* This function returns the first node (in sort order) of the tree.
*/
struct rb_node *rb_first(const struct rb_root *root)
{
	struct rb_node *node = root->rb_node;

	if (node == NULL)
		return NULL;
	/* smallest key lives at the bottom of the left spine */
	while (node->rb_left != NULL)
		node = node->rb_left;
	return node;
}
struct rb_node *rb_last(const struct rb_root *root)
{
	struct rb_node *node = root->rb_node;

	if (node == NULL)
		return NULL;
	/* largest key lives at the bottom of the right spine */
	while (node->rb_right != NULL)
		node = node->rb_right;
	return node;
}
/* Return the in-order successor of 'node', or NULL when 'node' is the
 * last node (or is not linked into a tree, per RB_EMPTY_NODE). */
struct rb_node *rb_next(const struct rb_node *node)
{
	struct rb_node *parent;

	if (RB_EMPTY_NODE(node))
		return NULL;

	/*
	 * If we have a right-hand child, go down and then left as far
	 * as we can.
	 */
	if (node->rb_right) {
		node = node->rb_right;
		while (node->rb_left)
			node = node->rb_left;
		return (struct rb_node *)node;
	}

	/*
	 * No right-hand children. Everything down and left is smaller than us,
	 * so any 'next' node must be in the general direction of our parent.
	 * Go up the tree; any time the ancestor is a right-hand child of its
	 * parent, keep going up. First time it's a left-hand child of its
	 * parent, said parent is our 'next' node.
	 */
	while ((parent = rb_parent(node)) && node == parent->rb_right)
		node = parent;

	return parent;
}
/*
 * In-order predecessor of @node, or NULL when @node is the first node.
 * A node that is not linked into any tree (RB_EMPTY_NODE) has no
 * predecessor.
 */
struct rb_node *rb_prev(const struct rb_node *node)
{
	struct rb_node *up;

	if (RB_EMPTY_NODE(node))
		return NULL;

	/* A left subtree exists: the predecessor is its rightmost node. */
	if (node->rb_left) {
		for (node = node->rb_left; node->rb_right; node = node->rb_right)
			;
		return (struct rb_node *)node;
	}

	/*
	 * No left subtree: climb while we are a left child. The first
	 * ancestor reached from its right side is the predecessor.
	 */
	for (up = rb_parent(node); up && node == up->rb_left; up = rb_parent(node))
		node = up;
	return up;
}
void rb_replace_node(struct rb_node *victim, struct rb_node *new,
struct rb_root *root)
{
struct rb_node *parent = rb_parent(victim);
/* Set the surrounding nodes to point to the replacement */
__rb_change_child(victim, new, parent, root);
if (victim->rb_left)
rb_set_parent(victim->rb_left, new);
if (victim->rb_right)
rb_set_parent(victim->rb_right, new);
/* Copy the pointers/colour from the victim to the replacement */
*new = *victim;
}
static struct rb_node *rb_left_deepest_node(const struct rb_node *node)
{
for (;;) {
if (node->rb_left)
node = node->rb_left;
else if (node->rb_right)
node = node->rb_right;
else
return (struct rb_node *)node;
}
}
/*
 * Post-order successor of @node: both children of a node are visited
 * before the node itself. Returns NULL after the root (or for NULL
 * input).
 */
struct rb_node *rb_next_postorder(const struct rb_node *node)
{
	const struct rb_node *up;

	if (!node)
		return NULL;
	up = rb_parent(node);

	/*
	 * Coming up from a left child with a right sibling present: the
	 * sibling's subtree is next, starting at its left-deepest node.
	 */
	if (up && node == up->rb_left && up->rb_right)
		return rb_left_deepest_node(up->rb_right);

	/* Otherwise both subtrees of the parent are done; visit it. */
	return (struct rb_node *)up;
}
/*
 * First node of a post-order traversal (the left-deepest leaf), or NULL
 * for an empty tree.
 */
struct rb_node *rb_first_postorder(const struct rb_root *root)
{
	return root->rb_node ? rb_left_deepest_node(root->rb_node) : NULL;
}

118
src/entry/rbtree.h Normal file
View File

@@ -0,0 +1,118 @@
/*
Red Black Trees
(C) 1999 Andrea Arcangeli <andrea@suse.de>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
linux/include/linux/rbtree.h
To use rbtrees you'll have to implement your own insert and search cores.
This will avoid us to use callbacks and to drop drammatically performances.
I know it's not the cleaner way, but in C (not in C++) to get
performances and genericity...
See Documentation/rbtree.txt for documentation and samples.
*/
#ifndef _LINUX_RBTREE_H
#define _LINUX_RBTREE_H
/* NOTE(review): <stdio.h> looks unused here; <stddef.h> would cover the
 * size_t needed by offsetof — confirm before changing. */
#include <stdio.h>
/*
 * A tree node. The parent pointer and the node's color are packed into
 * __rb_parent_color: the struct is aligned to sizeof(long), so the two
 * low bits of a node address are always zero and bit 0 can hold the
 * color (see rb_parent() below and the color macros in
 * rbtree_augmented.h).
 */
struct rb_node {
	unsigned long __rb_parent_color;
	struct rb_node *rb_right;
	struct rb_node *rb_left;
} __attribute__((aligned(sizeof(long))));
/* The alignment might seem pointless, but allegedly CRIS needs it */
struct rb_root {
	struct rb_node *rb_node;
};
/* Extract the parent pointer by masking off the low (color) bits. */
#define rb_parent(r)   ((struct rb_node *)((r)->__rb_parent_color & ~3))
#define RB_ROOT	(struct rb_root) { NULL, }
#ifndef offsetof
#define offsetof(TYPE, MEMBER) ((size_t) &((TYPE *)0)->MEMBER)
#endif
#ifndef container_of
/* Recover the address of the enclosing struct from a member pointer
 * (GCC statement-expression idiom). */
#define container_of(ptr, type, member) ({ \
	const typeof( ((type *)0)->member ) *__mptr = (ptr); \
	(type *)( (char *)__mptr - offsetof(type, member) );})
#endif
#define	rb_entry(ptr, type, member) container_of(ptr, type, member)
#define RB_EMPTY_ROOT(root)  ((root)->rb_node == NULL)
/* 'empty' nodes are nodes that are known not to be inserted in an rbtree */
/* (an unlinked node's __rb_parent_color points at the node itself) */
#define RB_EMPTY_NODE(node)  \
	((node)->__rb_parent_color == (unsigned long)(node))
#define RB_CLEAR_NODE(node)  \
	((node)->__rb_parent_color = (unsigned long)(node))
extern void rb_insert_color(struct rb_node *, struct rb_root *);
extern void rb_erase(struct rb_node *, struct rb_root *);
/* Find logical next and previous nodes in a tree */
extern struct rb_node *rb_next(const struct rb_node *);
extern struct rb_node *rb_prev(const struct rb_node *);
extern struct rb_node *rb_first(const struct rb_root *);
extern struct rb_node *rb_last(const struct rb_root *);
/* Postorder iteration - always visit the parent after its children */
extern struct rb_node *rb_first_postorder(const struct rb_root *);
extern struct rb_node *rb_next_postorder(const struct rb_node *);
/* Fast replacement of a single node without remove/rebalance/add/rebalance */
extern void rb_replace_node(struct rb_node *victim, struct rb_node *new,
			    struct rb_root *root);
/* Link @node under @parent at the child slot @rb_link; the caller must
 * follow up with rb_insert_color() to rebalance. */
static inline void rb_link_node(struct rb_node * node, struct rb_node * parent,
				struct rb_node ** rb_link)
{
	node->__rb_parent_color = (unsigned long)parent;
	node->rb_left = node->rb_right = NULL;
	*rb_link = node;
}
/* Like rb_entry() but maps a NULL node pointer to a NULL entry. */
#define rb_entry_safe(ptr, type, member) \
	({ typeof(ptr) ____ptr = (ptr); \
	   ____ptr ? rb_entry(____ptr, type, member) : NULL; \
	})
/**
 * rbtree_postorder_for_each_entry_safe - iterate over rb_root in post order of
 * given type safe against removal of rb_node entry
 *
 * @pos:	the 'type *' to use as a loop cursor.
 * @n:		another 'type *' to use as temporary storage
 * @root:	'rb_root *' of the rbtree.
 * @field:	the name of the rb_node field within 'type'.
 */
#define rbtree_postorder_for_each_entry_safe(pos, n, root, field) \
	for (pos = rb_entry_safe(rb_first_postorder(root), typeof(*pos), field); \
	     pos && ({ n = rb_entry_safe(rb_next_postorder(&pos->field), \
			typeof(*pos), field); 1; }); \
	     pos = n)
#endif	/* _LINUX_RBTREE_H */

View File

@@ -0,0 +1,241 @@
/*
Red Black Trees
(C) 1999 Andrea Arcangeli <andrea@suse.de>
(C) 2002 David Woodhouse <dwmw2@infradead.org>
(C) 2012 Michel Lespinasse <walken@google.com>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
linux/include/linux/rbtree_augmented.h
*/
#ifndef _LINUX_RBTREE_AUGMENTED_H
#define _LINUX_RBTREE_AUGMENTED_H
#include "rbtree.h"
/*
 * Please note - only struct rb_augment_callbacks and the prototypes for
 * rb_insert_augmented() and rb_erase_augmented() are intended to be public.
 * The rest are implementation details you are not expected to depend on.
 *
 * See Documentation/rbtree.txt for documentation and samples.
 */
/* Hooks by which an augmented tree keeps its per-node metadata current. */
struct rb_augment_callbacks {
	/* recompute cached data from @node upward, stopping at @stop */
	void (*propagate)(struct rb_node *node, struct rb_node *stop);
	/* copy @old's cached data onto @new when @new takes @old's place */
	void (*copy)(struct rb_node *old, struct rb_node *new);
	/* fix both nodes' cached data after @new is rotated above @old */
	void (*rotate)(struct rb_node *old, struct rb_node *new);
};
extern void __rb_insert_augmented(struct rb_node *node, struct rb_root *root,
	void (*augment_rotate)(struct rb_node *old, struct rb_node *new));
/*
 * Fixup the rbtree and update the augmented information when rebalancing.
 *
 * On insertion, the user must update the augmented information on the path
 * leading to the inserted node, then call rb_link_node() as usual and
 * rb_augment_inserted() instead of the usual rb_insert_color() call.
 * If rb_augment_inserted() rebalances the rbtree, it will callback into
 * a user provided function to update the augmented information on the
 * affected subtrees.
 */
static inline void
rb_insert_augmented(struct rb_node *node, struct rb_root *root,
		    const struct rb_augment_callbacks *augment)
{
	__rb_insert_augmented(node, root, augment->rotate);
}
/*
 * RB_DECLARE_CALLBACKS - generate the three augment callbacks for a tree
 * whose nodes cache @rbaugmented = @rbcompute(node) (e.g. a subtree max).
 *
 * @rbstatic:    'static' or empty - linkage for the generated struct
 * @rbname:      name of the generated struct rb_augment_callbacks
 * @rbstruct:    user node type that embeds the rb_node
 * @rbfield:     name of the rb_node member inside @rbstruct
 * @rbtype:      type of the cached augmented value
 * @rbaugmented: name of the cached member inside @rbstruct
 * @rbcompute:   function recomputing one node's augmented value
 *
 * propagate stops early once a node's cached value is already correct,
 * since ancestors then cannot have changed either.
 */
#define RB_DECLARE_CALLBACKS(rbstatic, rbname, rbstruct, rbfield,	\
			     rbtype, rbaugmented, rbcompute)		\
static inline void							\
rbname ## _propagate(struct rb_node *rb, struct rb_node *stop)		\
{									\
	while (rb != stop) {						\
		rbstruct *node = rb_entry(rb, rbstruct, rbfield);	\
		rbtype augmented = rbcompute(node);			\
		if (node->rbaugmented == augmented)			\
			break;						\
		node->rbaugmented = augmented;				\
		rb = rb_parent(&node->rbfield);				\
	}								\
}									\
static inline void							\
rbname ## _copy(struct rb_node *rb_old, struct rb_node *rb_new)		\
{									\
	rbstruct *old = rb_entry(rb_old, rbstruct, rbfield);		\
	rbstruct *new = rb_entry(rb_new, rbstruct, rbfield);		\
	new->rbaugmented = old->rbaugmented;				\
}									\
static void								\
rbname ## _rotate(struct rb_node *rb_old, struct rb_node *rb_new)	\
{									\
	rbstruct *old = rb_entry(rb_old, rbstruct, rbfield);		\
	rbstruct *new = rb_entry(rb_new, rbstruct, rbfield);		\
	new->rbaugmented = old->rbaugmented;				\
	old->rbaugmented = rbcompute(old);				\
}									\
rbstatic const struct rb_augment_callbacks rbname = {			\
	rbname ## _propagate, rbname ## _copy, rbname ## _rotate	\
};
/* Node colors, stored in bit 0 of __rb_parent_color. */
#define	RB_RED		0
#define	RB_BLACK	1
/* Parent/color accessors operating on a raw __rb_parent_color value. */
#define __rb_parent(pc)    ((struct rb_node *)(pc & ~3))
#define __rb_color(pc)     ((pc) & 1)
#define __rb_is_black(pc)  __rb_color(pc)
#define __rb_is_red(pc)    (!__rb_color(pc))
#define rb_color(rb)       __rb_color((rb)->__rb_parent_color)
#define rb_is_red(rb)      __rb_is_red((rb)->__rb_parent_color)
#define rb_is_black(rb)    __rb_is_black((rb)->__rb_parent_color)
/* Set @rb's parent to @p while preserving @rb's current color bit. */
static inline void rb_set_parent(struct rb_node *rb, struct rb_node *p)
{
	rb->__rb_parent_color = rb_color(rb) | (unsigned long)p;
}
/* Set @rb's parent and color in a single store. */
static inline void rb_set_parent_color(struct rb_node *rb,
				       struct rb_node *p, int color)
{
	rb->__rb_parent_color = (unsigned long)p | color;
}
/* Repoint @parent's child link (or the root, if @parent is NULL) from
 * @old to @new. */
static inline void
__rb_change_child(struct rb_node *old, struct rb_node *new,
		  struct rb_node *parent, struct rb_root *root)
{
	if (parent) {
		if (parent->rb_left == old)
			parent->rb_left = new;
		else
			parent->rb_right = new;
	} else
		root->rb_node = new;
}
extern void __rb_erase_color(struct rb_node *parent, struct rb_root *root,
	void (*augment_rotate)(struct rb_node *old, struct rb_node *new));
/*
 * Unlink @node from @root while keeping the @augment callbacks' cached
 * metadata current. Returns the node at which color rebalancing must
 * start, or NULL if the tree is already valid. Shared core of rb_erase()
 * and rb_erase_augmented().
 *
 * NOTE(review): __always_inline is a kernel/glibc-internal macro; confirm
 * this userspace build defines it (e.g. as
 * inline __attribute__((always_inline))) somewhere before this header.
 */
static __always_inline struct rb_node *
__rb_erase_augmented(struct rb_node *node, struct rb_root *root,
		     const struct rb_augment_callbacks *augment)
{
	struct rb_node *child = node->rb_right, *tmp = node->rb_left;
	struct rb_node *parent, *rebalance;
	unsigned long pc;
	if (!tmp) {
		/*
		 * Case 1: node to erase has no more than 1 child (easy!)
		 *
		 * Note that if there is one child it must be red due to 5)
		 * and node must be black due to 4). We adjust colors locally
		 * so as to bypass __rb_erase_color() later on.
		 */
		pc = node->__rb_parent_color;
		parent = __rb_parent(pc);
		__rb_change_child(node, child, parent, root);
		if (child) {
			/* the red child takes node's (black) place/color */
			child->__rb_parent_color = pc;
			rebalance = NULL;
		} else
			/* removing a childless black node needs rebalance */
			rebalance = __rb_is_black(pc) ? parent : NULL;
		tmp = parent;
	} else if (!child) {
		/* Still case 1, but this time the child is node->rb_left */
		tmp->__rb_parent_color = pc = node->__rb_parent_color;
		parent = __rb_parent(pc);
		__rb_change_child(node, tmp, parent, root);
		rebalance = NULL;
		tmp = parent;
	} else {
		/* Two children: splice in the in-order successor. */
		struct rb_node *successor = child, *child2;
		tmp = child->rb_left;
		if (!tmp) {
			/*
			 * Case 2: node's successor is its right child
			 *
			 *    (n)          (s)
			 *    / \          / \
			 *  (x) (s)  ->  (x) (c)
			 *        \
			 *        (c)
			 */
			parent = successor;
			child2 = successor->rb_right;
			augment->copy(node, successor);
		} else {
			/*
			 * Case 3: node's successor is leftmost under
			 * node's right child subtree
			 *
			 *    (n)          (s)
			 *    / \          / \
			 *  (x) (y)  ->  (x) (y)
			 *      /            /
			 *    (p)          (p)
			 *    /            /
			 *  (s)          (c)
			 *    \
			 *    (c)
			 */
			do {
				parent = successor;
				successor = tmp;
				tmp = tmp->rb_left;
			} while (tmp);
			parent->rb_left = child2 = successor->rb_right;
			successor->rb_right = child;
			rb_set_parent(child, successor);
			augment->copy(node, successor);
			augment->propagate(parent, successor);
		}
		/* Successor inherits node's left subtree, position, color. */
		successor->rb_left = tmp = node->rb_left;
		rb_set_parent(tmp, successor);
		pc = node->__rb_parent_color;
		tmp = __rb_parent(pc);
		__rb_change_child(node, successor, tmp, root);
		if (child2) {
			/* child2 was red (rule 5); recolor black in place */
			successor->__rb_parent_color = pc;
			rb_set_parent_color(child2, parent, RB_BLACK);
			rebalance = NULL;
		} else {
			unsigned long pc2 = successor->__rb_parent_color;
			successor->__rb_parent_color = pc;
			rebalance = __rb_is_black(pc2) ? parent : NULL;
		}
		tmp = successor;
	}
	/* Refresh augmented data from the lowest changed node upward. */
	augment->propagate(tmp, NULL);
	return rebalance;
}
/*
 * Erase @node from an augmented tree: unlink via the shared core, then
 * rebalance colors if required, notifying @augment on every rotation.
 */
static __always_inline void
rb_erase_augmented(struct rb_node *node, struct rb_root *root,
		   const struct rb_augment_callbacks *augment)
{
	struct rb_node *rebalance = __rb_erase_augmented(node, root, augment);
	if (rebalance)
		__rb_erase_color(rebalance, root, augment->rotate);
}
#endif	/* _LINUX_RBTREE_AUGMENTED_H */

106
src/entry/sfh_internal.h Normal file
View File

@@ -0,0 +1,106 @@
/*
 * Internal declarations for the SFH (segmented fuzzy hash) engine used
 * by mesa_fuzzy. Not part of the public API.
 */
#include<zt_hash.h>
#include<interval_index.h>
#include<mesa_fuzzy.h>
/* NOTE(review): the guard name below uses a reserved identifier (leading
 * double underscore), and the three includes above sit outside the guard
 * (harmless only because those headers carry their own guards). */
#ifndef __SFH_INTERNAL_H_INCLUDE_
#define __SFH_INTERNAL_H_INCLUDE_
/* Width of the rolling-hash window, in bytes. */
#define ROLLING_WINDOW 7
/* Smallest permitted fuzzy-hash block size. */
#define BLOCKSIZE_MIN 3
/* Constants used by the strong-hash arithmetic (FNV-style values). */
#define HASH_PRIME 0x01000193
#define HASH_INIT 0x28021967
/* 'type' argument values for fuzzy_status(). */
#define CALCULATE 0
#define MODIFY 1
/* Target length of an emitted signature, in characters. */
#define EXPECT_SIGNATURE_LEN 64
#define MEMORY_OCCUPY 3
#ifndef MAX
#define MAX(a, b) (((a) > (b)) ? (a) : (b))
#endif
#ifndef MIN
#define MIN(a, b) (((a) < (b)) ? (a) : (b))
#endif
#ifndef container_of
#define container_of(ptr, type, member) ({ \
	const typeof( ((type *)0)->member ) *__mptr = (ptr); \
	(type *)( (char *)__mptr - offsetof(type,member) );})
#endif
#define DEBUG (0)
/* NOTE(review): this is a tentative *definition* of a global in a header -
 * every .c that includes this file defines hash_length. That links only
 * under -fcommon (the pre-GCC-10 default); it should be
 * 'extern int hash_length;' here with exactly one definition in a .c. */
int hash_length;
/* State of the byte-wise rolling hash. */
struct roll_state_t
{
	unsigned char window[ROLLING_WINDOW];
	unsigned char pad[1];
	unsigned int h1, h2, h3;
	unsigned int n;
};
/* One contiguous hashed segment of the input stream. */
typedef struct
{
	/* carry-over bytes so the rolling window spans segment joins —
	 * presumably; confirm against sfh_update_seg() */
	char mbuf[ROLLING_WINDOW-1];
	char pad[8-ROLLING_WINDOW+1];
	int slice_num;
	unsigned int msize;
	struct zt_state_t ps; //partial strong hash value
	struct zt_state_t s_state;//strong hash state
	/* byte range of the original stream covered by this segment */
	unsigned long long left_offset;
	unsigned long long right_offset;
	struct roll_state_t r_state;
	unsigned int * r_array; //array to store rolling hash value
	unsigned int r_cnt;
	unsigned int r_size;
	struct zt_state_t * s_array; //array to store strong(Tillichi-Zemor) hash value
	unsigned int s_cnt; //always point to the next available position
	unsigned int s_size;
}sfh_seg_t;
/* Per-handle bookkeeping behind the opaque fuzzy_handle_t. */
typedef struct
{
	unsigned long long orilen;
	/* (translated from garbled GBK) each handle holds one IVI pointer;
	 * the IVI stores the slices of one file — TODO confirm */
	IVI_t * ivi;
	unsigned long long effective_length;
	unsigned long long blocksize;
	/* memory accounting, queried via fuzzy_status() */
	unsigned long long fuzzy_node_memory;
	unsigned long long IVI_memory;
	unsigned long long length_increase;
	int s_state_cnt;
	unsigned int sim_tuned_rs_cnt;//rolling state count after a tune simulation
	int do_tune;
}fuzzy_handle_inner_t;
/* Assembled signature output. */
typedef struct
{
	/* (original comment garbled GBK) char buffer holding the result —
	 * TODO confirm exact meaning */
	char * data;
	unsigned int size;
	/* (original comment garbled GBK) appears to be the current write
	 * position/length within data — TODO confirm */
	unsigned int offset;
	unsigned long long first_ZTH_offset;
	unsigned long long last_ZTH_offset;
	char last_char;
}final_result;
/* Length summary of a produced hash. */
typedef struct
{
	unsigned long long first_ZTH_offset;
	unsigned long long last_ZTH_offset;
	unsigned long long hash_length;
}final_length;
sfh_seg_t* create_sfh_seg(fuzzy_handle_inner_t * _handle);
int destroy_sfh_seg(sfh_seg_t*p);
unsigned long long get_blocksize(unsigned long long orilen);
int sfh_merge_seg(fuzzy_handle_inner_t * _handle,sfh_seg_t * seg, sfh_seg_t * next_seg, unsigned long long blocksize);
int sfh_update_seg(fuzzy_handle_inner_t * _handle,sfh_seg_t * p, const char * data, unsigned long data_size, unsigned long long blocksize);
unsigned int segment_overlap(fuzzy_handle_inner_t * handle, unsigned int size, unsigned long long offset, const char * data);
void sfh_tune_seg(IVI_seg_t * seg, void * user_para);
void sfh_output_state(IVI_seg_t * seg, void * user_para);
void fuzzy_hash_length(IVI_seg_t * seg, void * user_para);
unsigned long long fuzzy_status(fuzzy_handle_t * handle, int type);
#endif

234
src/entry/zt_hash.h Normal file

File diff suppressed because one or more lines are too long

View File

@@ -2,6 +2,6 @@ LIBS=../lib/libmaatframe.so
INC=-I../inc/ -I/usr/include/MESA/
all:
g++ -o maat_test -g -Wall maat_test.cpp $(INC) $(LIBS)
g++ -o digest_gen -g digest_gen.c -I../src/inc_internal/ ../lib/libmaatframe.so
g++ -o digest_gen -g digest_gen.c -I../src/entry/ ../lib/libmaatframe.so
clean:
rm maat_test test.log* -f

View File

@@ -6,7 +6,7 @@
#include<sys/stat.h>
#include<time.h>
#include<math.h>
#include "mesa_fuzzy.h"
#include "../entry/mesa_fuzzy.h"
void* entropy_start(void)
{

View File

@@ -181,7 +181,7 @@
"table_type": "digest",
"table_content": {
"raw_len": 1160164,
"digest": "FsOmaK3utZafWYt/i[7203:46299992]",
"digest": "12288:UChtbFS6pypdTy4m2[0:1160163]",
"cfds_level": 3
}
}