
📄 old_colamd.c

📁 SuperLU version 2.2. Direct solution of large, sparse, nonsymmetric linear systems
💻 C
📖 Page 1 of 5
	    while (cp < cp_end)
	    {
		/* get a row */
		row = *cp++ ;
		row_mark = Row [row].shared2.mark ;
		/* skip if dead */
		if (ROW_IS_MARKED_DEAD (row_mark))
		{
		    continue ;
		}
		assert (row != pivot_row) ;
		set_difference = row_mark - tag_mark ;
		/* check if the row has been seen yet */
		if (set_difference < 0)
		{
		    assert (Row [row].shared1.degree <= max_deg) ;
		    set_difference = Row [row].shared1.degree ;
		}
		/* subtract column thickness from this row's set difference */
		set_difference -= col_thickness ;
		assert (set_difference >= 0) ;
		/* absorb this row if the set difference becomes zero */
		if (set_difference == 0)
		{
		    DEBUG1 (("aggressive absorption. Row: %d\n", row)) ;
		    KILL_ROW (row) ;
		}
		else
		{
		    /* save the new mark */
		    Row [row].shared2.mark = set_difference + tag_mark ;
		}
	    }
	}

#ifndef NDEBUG
	debug_deg_lists (n_row, n_col, Row, Col, head,
		min_score, n_col2-k-pivot_row_degree, max_deg) ;
#endif

	/* === Add up set differences for each column ======================= */

	DEBUG1 (("** Adding set differences phase. **\n")) ;

	/* for each column in pivot row */
	rp = &A [pivot_row_start] ;
	rp_end = rp + pivot_row_length ;
	while (rp < rp_end)
	{
	    /* get a column */
	    col = *rp++ ;
	    assert (COL_IS_ALIVE (col) && col != pivot_col) ;
	    hash = 0 ;
	    cur_score = 0 ;
	    cp = &A [Col [col].start] ;
	    /* compact the column */
	    new_cp = cp ;
	    cp_end = cp + Col [col].length ;

	    DEBUG2 (("Adding set diffs for Col: %d.\n", col)) ;

	    while (cp < cp_end)
	    {
		/* get a row */
		row = *cp++ ;
		assert (row >= 0 && row < n_row) ;
		row_mark = Row [row].shared2.mark ;
		/* skip if dead */
		if (ROW_IS_MARKED_DEAD (row_mark))
		{
		    continue ;
		}
		assert (row_mark > tag_mark) ;
		/* compact the column */
		*new_cp++ = row ;
		/* compute hash function */
		hash += row ;
		/* add set difference */
		cur_score += row_mark - tag_mark ;
		/* integer overflow... */
		cur_score = MIN (cur_score, n_col) ;
	    }

	    /* recompute the column's length */
	    Col [col].length = (int) (new_cp - &A [Col [col].start]) ;
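	    /* Illustrative note (hypothetical values): cur_score now holds */
	    /* the column's accumulated set differences.  For example, if   */
	    /* this column touched three live rows whose set differences    */
	    /* were 2, 5, and 1, cur_score would be 8; the MIN above merely */
	    /* keeps it from overflowing past n_col.                        */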
	    /* === Further mass elimination ================================= */

	    if (Col [col].length == 0)
	    {
		DEBUG1 (("further mass elimination. Col: %d\n", col)) ;
		/* nothing left but the pivot row in this column */
		KILL_PRINCIPAL_COL (col) ;
		pivot_row_degree -= Col [col].shared1.thickness ;
		assert (pivot_row_degree >= 0) ;
		/* order it */
		Col [col].shared2.order = k ;
		/* increment order count by column thickness */
		k += Col [col].shared1.thickness ;
	    }
	    else
	    {
		/* === Prepare for supercolumn detection ==================== */

		DEBUG2 (("Preparing supercol detection for Col: %d.\n", col)) ;

		/* save score so far */
		Col [col].shared2.score = cur_score ;

		/* add column to hash table, for supercolumn detection */
		hash %= n_col + 1 ;

		DEBUG2 ((" Hash = %d, n_col = %d.\n", hash, n_col)) ;
		assert (hash <= n_col) ;

		head_column = head [hash] ;
		if (head_column > EMPTY)
		{
		    /* degree list "hash" is non-empty, use prev (shared3) of */
		    /* first column in degree list as head of hash bucket */
		    first_col = Col [head_column].shared3.headhash ;
		    Col [head_column].shared3.headhash = col ;
		}
		else
		{
		    /* degree list "hash" is empty, use head as hash bucket */
		    first_col = - (head_column + 2) ;
		    head [hash] = - (col + 2) ;
		}
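		/* Worked example of this encoding (hypothetical values):   */
		/* if col = 7 is the first column placed in bucket "hash"   */
		/* while head [hash] == EMPTY, then head [hash] becomes     */
		/* -(7 + 2) = -9, and a later lookup recovers the bucket    */
		/* head as -(head [hash] + 2) = 7.  Encoded entries are     */
		/* always <= -2, so they cannot be mistaken for a degree    */
		/* list head (>= 0) or for EMPTY.                           */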
		Col [col].shared4.hash_next = first_col ;

		/* save hash function in Col [col].shared3.hash */
		Col [col].shared3.hash = (int) hash ;
		assert (COL_IS_ALIVE (col)) ;
	    }
	}

	/* The approximate external column degree is now computed. */

	/* === Supercolumn detection ======================================== */

	DEBUG1 (("** Supercolumn detection phase. **\n")) ;

	detect_super_cols (
#ifndef NDEBUG
		n_col, Row,
#endif
		Col, A, head, pivot_row_start, pivot_row_length) ;

	/* === Kill the pivotal column ====================================== */

	KILL_PRINCIPAL_COL (pivot_col) ;

	/* === Clear mark =================================================== */

	tag_mark += (max_deg + 1) ;
	if (tag_mark >= max_mark)
	{
	    DEBUG1 (("clearing tag_mark\n")) ;
	    tag_mark = clear_mark (n_row, Row) ;
	}

#ifndef NDEBUG
	DEBUG3 (("check3\n")) ;
	debug_mark (n_row, Row, tag_mark, max_mark) ;
#endif

	/* === Finalize the new pivot row, and column scores ================ */

	DEBUG1 (("** Finalize scores phase. **\n")) ;

	/* for each column in pivot row */
	rp = &A [pivot_row_start] ;
	/* compact the pivot row */
	new_rp = rp ;
	rp_end = rp + pivot_row_length ;
	while (rp < rp_end)
	{
	    col = *rp++ ;
	    /* skip dead columns */
	    if (COL_IS_DEAD (col))
	    {
		continue ;
	    }
	    *new_rp++ = col ;
	    /* add new pivot row to column */
	    A [Col [col].start + (Col [col].length++)] = pivot_row ;

	    /* retrieve score so far and add on pivot row's degree. */
	    /* (we wait until here for this in case the pivot */
	    /* row's degree was reduced due to mass elimination). */
	    cur_score = Col [col].shared2.score + pivot_row_degree ;

	    /* calculate the max possible score as the number of */
	    /* external columns minus the 'k' value minus the */
	    /* column's thickness */
	    max_score = n_col - k - Col [col].shared1.thickness ;

	    /* make the score the external degree of the union-of-rows */
	    cur_score -= Col [col].shared1.thickness ;

	    /* make sure the score is less than or equal to the max score */
	    cur_score = MIN (cur_score, max_score) ;
	    assert (cur_score >= 0) ;

	    /* store updated score */
	    Col [col].shared2.score = cur_score ;
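	    /* Worked example of the clamp above (hypothetical numbers):    */
	    /* with n_col = 10, k = 4, and a column of thickness 2,         */
	    /* max_score = 10 - 4 - 2 = 4; a saved score of 7 plus a pivot  */
	    /* row degree of 3, minus the thickness 2, gives 8, which is    */
	    /* clamped to 4 so the stored score never exceeds the largest   */
	    /* possible external degree.                                    */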
	    /* === Place column back in degree list ========================= */

	    assert (min_score >= 0) ;
	    assert (min_score <= n_col) ;
	    assert (cur_score >= 0) ;
	    assert (cur_score <= n_col) ;
	    assert (head [cur_score] >= EMPTY) ;
	    next_col = head [cur_score] ;
	    Col [col].shared4.degree_next = next_col ;
	    Col [col].shared3.prev = EMPTY ;
	    if (next_col != EMPTY)
	    {
		Col [next_col].shared3.prev = col ;
	    }
	    head [cur_score] = col ;

	    /* see if this score is less than current min */
	    min_score = MIN (min_score, cur_score) ;
	}

#ifndef NDEBUG
	debug_deg_lists (n_row, n_col, Row, Col, head,
		min_score, n_col2-k, max_deg) ;
#endif

	/* === Resurrect the new pivot row ================================== */

	if (pivot_row_degree > 0)
	{
	    /* update pivot row length to reflect any cols that were killed */
	    /* during super-col detection and mass elimination */
	    Row [pivot_row].start  = pivot_row_start ;
	    Row [pivot_row].length = (int) (new_rp - &A [pivot_row_start]) ;
	    Row [pivot_row].shared1.degree = pivot_row_degree ;
	    Row [pivot_row].shared2.mark = 0 ;
	    /* pivot row is no longer dead */
	}
    }

    /* === All principal columns have now been ordered ====================== */

    return (ngarbage) ;
}


/* ========================================================================== */
/* === order_children ======================================================= */
/* ========================================================================== */

/*
    The find_ordering routine has ordered all of the principal columns (the
    representatives of the supercolumns).  The non-principal columns have not
    yet been ordered.  This routine orders those columns by walking up the
    parent tree (a column is a child of the column which absorbed it).  The
    final permutation vector is then placed in p [0 ... n_col-1], with p [0]
    being the first column, and p [n_col-1] being the last.  It doesn't look
    like it at first glance, but be assured that this routine takes time
    linear in the number of columns, that is, O (n_col).  Not user-callable.
*/
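/*
    Illustrative example (hypothetical column numbers): suppose non-principal
    columns 2 and 5 were absorbed into principal column 8, and column 8 was
    given order 3 (with thickness 3) by find_ordering.  After this routine,
    columns 2 and 5 receive orders 3 and 4, column 8 is re-ordered to 5 (the
    largest order in its group), and both non-principal columns point directly
    at column 8, so the final permutation lists the whole supercolumn
    consecutively.
*/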
PRIVATE void order_children
(
    /* === Parameters ======================================================= */

    int n_col,			/* number of columns of A */
    ColInfo Col [],		/* of size n_col+1 */
    int p []			/* p [0 ... n_col-1] is the column permutation */
)
{
    /* === Local variables ================================================== */

    int i ;			/* loop counter for all columns */
    int c ;			/* column index */
    int parent ;		/* index of column's parent */
    int order ;			/* column's order */

    /* === Order each non-principal column ================================== */

    for (i = 0 ; i < n_col ; i++)
    {
	/* find an un-ordered non-principal column */
	assert (COL_IS_DEAD (i)) ;
	if (!COL_IS_DEAD_PRINCIPAL (i) && Col [i].shared2.order == EMPTY)
	{
	    parent = i ;
	    /* once found, find its principal parent */
	    do
	    {
		parent = Col [parent].shared1.parent ;
	    } while (!COL_IS_DEAD_PRINCIPAL (parent)) ;

	    /* now, order all un-ordered non-principal columns along path */
	    /* to this parent.  collapse tree at the same time */
	    c = i ;
	    /* get order of parent */
	    order = Col [parent].shared2.order ;

	    do
	    {
		assert (Col [c].shared2.order == EMPTY) ;
		/* order this column */
		Col [c].shared2.order = order++ ;
		/* collapse tree */
		Col [c].shared1.parent = parent ;
		/* get immediate parent of this column */
		c = Col [c].shared1.parent ;
		/* continue until we hit an ordered column.  There are */
		/* guaranteed not to be any more unordered columns */
		/* above an ordered column */
	    } while (Col [c].shared2.order == EMPTY) ;

	    /* re-order the super_col parent to largest order for this group */
	    Col [parent].shared2.order = order ;
	}
    }

    /* === Generate the permutation ========================================= */

    for (c = 0 ; c < n_col ; c++)
    {
	p [Col [c].shared2.order] = c ;
    }
}


/* ========================================================================== */
/* === detect_super_cols ==================================================== */
/* ========================================================================== */

/*
    Detects supercolumns by finding matches between columns in the hash
    buckets.  Check amongst columns in the set A [row_start ... row_start +
    row_length-1].  The columns under consideration are currently *not* in
    the degree lists, and have already been placed in the hash buckets.

    The hash bucket for columns whose hash function is equal to h is stored
    as follows:

	if head [h] is >= 0, then head [h] contains a degree list, so:

		head [h] is the first column in degree bucket h.
		Col [head [h]].headhash gives the first column in hash bucket h.

	otherwise, the degree list is empty, and:

		-(head [h] + 2) is the first column in hash bucket h.

    For a column c in a hash bucket, Col [c].shared3.prev is NOT a "previous
    column" pointer.  Col [c].shared3.hash is used instead as the hash number
    for that column.  The value of Col [c].shared4.hash_next is the next column
    in the same hash bucket.

    Assuming no, or "few" hash collisions, the time taken by this routine is
    linear in the sum of the sizes (lengths) of each column whose score has
    just been computed in the approximate degree computation.
    Not user-callable.
*/

PRIVATE void detect_super_cols
(
    /* === Parameters ======================================================= */

#ifndef NDEBUG
    /* these two parameters are only needed when debugging is enabled: */
    int n_col,			/* number of columns of A */
    RowInfo Row [],		/* of size n_row+1 */
#endif
    ColInfo Col [],		/* of size n_col+1 */
    int A [],			/* row indices of A */
    int head [],		/* head of degree lists and hash buckets */
    int row_start,		/* pointer to set of columns to check */
    int row_length		/* number of columns to check */
)
{
    /* === Local variables ================================================== */

    int hash ;			/* hash # for a column */
    int *rp ;			/* pointer to a row */
    int c ;			/* a column index */
    int super_c ;		/* column index of the column to absorb into */
    int *cp1 ;			/* column pointer for column super_c */
    int *cp2 ;			/* column pointer for column c */
    int length ;		/* length of column super_c */
    int prev_c ;		/* column preceding c in hash bucket */
    int i ;			/* loop counter */
    int *rp_end ;		/* pointer to the end of the row */
    int col ;			/* a column index in the row to check */
    int head_column ;		/* first column in hash bucket or degree list */
    int first_col ;		/* first column in hash bucket */

    /* === Consider ea
