X-Git-Url: http://git.indexdata.com/?a=blobdiff_plain;f=src%2Frelevance.c;h=e484ca9b2ebf7120b4893ceba6116886f52f50e5;hb=69726762604ca262c12041b34564b9a74833d3dd;hp=9de591f085ea43ba070316fcf8c2dbb14462b5af;hpb=fb1ba03adf3d800d2b40e7f0d6ea367e6c520356;p=pazpar2-moved-to-github.git

diff --git a/src/relevance.c b/src/relevance.c
index 9de591f..e484ca9 100644
--- a/src/relevance.c
+++ b/src/relevance.c
@@ -1,5 +1,5 @@
 /* This file is part of Pazpar2.
-   Copyright (C) 2006-2013 Index Data
+   Copyright (C) Index Data
 
 Pazpar2 is free software; you can redistribute it and/or modify it under
 the terms of the GNU General Public License as published by the Free
@@ -77,6 +77,7 @@ const int scorefield_none = -1; // Do not normalize anything, use tf/idf as is
                                 // This is the old behavior, and the default
 const int scorefield_internal = -2; // use our tf/idf, but normalize it
 const int scorefield_position = -3; // fake a score based on the position
+// Positive numbers indicate the field to be used for scoring.
 
 // A structure for each (sub)record. There is one list for each client
 struct norm_record
@@ -130,7 +131,7 @@ struct norm_client *findnorm( struct relevance *rel, struct client* client)
 }
 
 
-// Add a record in the list for that client, for normalizing later
+// Add all records from a cluster into the list for that client, for normalizing later
 static void setup_norm_record( struct relevance *rel, struct record_cluster *clust)
 {
     struct record *record;
@@ -155,10 +156,10 @@ static void setup_norm_record( struct relevance *rel, struct record_cluster *cl
         {
             struct record_metadata *md = record->metadata[norm->scorefield];
             rp->score = md->data.fnumber;
-            assert(rp->score>0); // ###
         }
         yaz_log(YLOG_LOG,"Got score for %d/%d : %f ",
                 norm->num, record->position, rp->score );
+        record -> score = rp->score;
         if ( norm->count == 1 )
         {
             norm->max = rp->score;
@@ -166,8 +167,8 @@ static void setup_norm_record( struct relevance *rel, struct record_cluster *cl
         } else
         {
             if ( rp->score > norm->max )
                 norm->max = rp->score;
-            if ( rp->score < norm->min && abs(rp->score) < 1e-6 )
-                norm->min = rp->score;  // skip zeroes
+            if ( rp->score < norm->min )
+                norm->min = rp->score;
         }
     }
 }
@@ -187,14 +188,19 @@ static double squaresum( struct norm_record *rp, double a, double b)
     return sum;
 }
 
+// For each client, normalize scores
 static void normalize_scores(struct relevance *rel)
 {
-    // For each client, normalize scores
+    const int maxiterations = 1000;
+    const double enough = 100.0;    // sets the number of decimals we are happy with
+    const double stepchange = 0.5;  // reduction of the step size when finding middle
+        // 0.5 seems to be magical, much better than 0.4 or 0.6
     struct norm_client *norm;
     for ( norm = rel->norm; norm; norm = norm->next )
     {
-        yaz_log(YLOG_LOG,"Normalizing client %d: scorefield=%d count=%d",
-                norm->num, norm->scorefield, norm->count);
+        yaz_log(YLOG_LOG,"Normalizing client %d: scorefield=%d count=%d range=%f %f = %f",
+                norm->num, norm->scorefield, norm->count, norm->min,
+                norm->max, norm->max-norm->min);
         norm->a = 1.0; // default normalizing factors, no change
         norm->b = 0.0;
         if ( norm->scorefield != scorefield_none &&
@@ -205,56 +211,95 @@ static void normalize_scores(struct relevance *rel)
             double a,b;   // params to optimize
             double as,bs; // step sizes
             double chi;
+            char *branch = "?";
             // initial guesses for the parameters
+            // Rmax = a * rmax + b   # want to be 1.0
+            // Rmin = a * rmin + b   # want to be 0.0
+            // Rmax - Rmin = a ( rmax - rmin )   # subtracting equations
+            // 1.0 - 0.0 = a ( rmax - rmin )
+            // a = 1 / range
+            // Rmin = a * rmin + b
+            // b = Rmin - a * rmin
+            //   = 0.0 - 1/range * rmin
+            //   = - rmin / range
             if ( range < 1e-6 ) // practically zero
                 range = norm->max;
             a = 1.0 / range;
-            b = abs(norm->min);
-            as = a / 3;
-            bs = b / 3;
+            b = -1.0 * norm->min / range;
+            // b = fabs(norm->min) / range;
+            as = a / 10;
+            bs = fabs(b) / 10;
             chi = squaresum( norm->records, a,b);
-            while (it++ < 100) // safeguard against things not converging
+            yaz_log(YLOG_LOG,"Initial done: it=%d: a=%f / %f b=%f / %f chi = %f",
+                    0, a, as, b, bs, chi );
+            while (it++ < maxiterations) // safeguard against things not converging
             {
-                // optimize a
-                double plus = squaresum(norm->records, a+as, b);
-                double minus= squaresum(norm->records, a-as, b);
-                if ( plus < chi && plus < minus )
+                double aplus = squaresum(norm->records, a+as, b);
+                double aminus= squaresum(norm->records, a-as, b);
+                double bplus = squaresum(norm->records, a, b+bs);
+                double bminus= squaresum(norm->records, a, b-bs);
+                double prevchi = chi;
+                if ( aplus < chi && aplus < aminus && aplus < bplus && aplus < bminus)
                 {
                     a = a + as;
-                    chi = plus;
+                    chi = aplus;
+                    as = as * (1.0 + stepchange);
+                    branch = "aplus ";
                 }
-                else if ( minus < chi && minus < plus )
+                else if ( aminus < chi && aminus < aplus && aminus < bplus && aminus < bminus)
                 {
                     a = a - as;
-                    chi = minus;
+                    chi = aminus;
+                    as = as * (1.0 + stepchange);
+                    branch = "aminus";
                 }
-                else
-                    as = as / 2;
-                // optimize b
-                plus = squaresum(norm->records, a, b+bs);
-                minus= squaresum(norm->records, a, b-bs);
-                if ( plus < chi && plus < minus )
+                else if ( bplus < chi && bplus < aplus && bplus < aminus && bplus < bminus)
                 {
                     b = b + bs;
-                    chi = plus;
+                    chi = bplus;
+                    bs = bs * (1.0 + stepchange);
+                    branch = "bplus ";
                 }
-                else if ( minus < chi && minus < plus )
+                else if ( bminus < chi && bminus < aplus && bminus < bplus && bminus < aminus)
                 {
                     b = b - bs;
-                    chi = minus;
+                    chi = bminus;
+                    branch = "bminus";
+                    bs = bs * (1.0+stepchange);
                 }
                 else
-                    bs = bs / 2;
-                yaz_log(YLOG_LOG,"Fitting it=%d: a=%f / %f b=%f / %f chi = %f",
-                        it, a, as, b, bs, chi );
+                { // a,b is the best so far, adjust one step size
+                    // which one? The one that has the greatest effect on chi
+                    // That is, the average of plus and minus is further away from chi
+                    double adif = 0.5 * ( aplus + aminus ) - prevchi;
+                    double bdif = 0.5 * ( bplus + bminus ) - prevchi;
+                    if ( fabs(adif) > fabs(bdif) )
+                    {
+                        as = as * ( 1.0 - stepchange);
+                        branch = "step a";
+                    }
+                    else
+                    {
+                        bs = bs * ( 1.0 - stepchange);
+                        branch = "step b";
+                    }
+                }
+                yaz_log(YLOG_LOG,"Fitting %s it=%d: a=%g %g  b=%g %g  chi=%g ap=%g am=%g, bp=%g bm=%g p=%g",
+                        branch, it, a, as, b, bs, chi,
+                        aplus, aminus, bplus, bminus, prevchi );
                 norm->a = a;
                 norm->b = b;
-                if ( abs(as) * 1000.0 < abs(a) &&
-                     abs(bs) * 1000.0 < abs(b) )
+                if ( fabs(as) * enough < fabs(a) &&
+                     fabs(bs) * enough < fabs(b) )
                 {
                     break;  // not changing much any more
+
+
                 }
             }
+            yaz_log(YLOG_LOG,"Fitting done: it=%d: a=%g / %g b=%g / %g chi = %g",
+                    it-1, a, as, b, bs, chi );
         }
-        
+
         if ( norm->scorefield != scorefield_none )
         { // distribute the normalized scores to the records
             struct norm_record *nr = norm->records;
@@ -262,14 +307,13 @@ static void normalize_scores(struct relevance *rel)
                 double r = nr->score;
                 r = norm->a * r + norm -> b;
                 nr->clust->relevance_score = 10000 * r;
+                nr->record->score = r;
                 yaz_log(YLOG_LOG,"Normalized %f * %f + %f = %f",
                         nr->score, norm->a, norm->b, r );
                 // TODO - This keeps overwriting the cluster score in random order!
-                // Need to merge results better
+                //        Need to merge results better
             }
-        }
-        
     } // client loop
 }
 
@@ -602,7 +646,7 @@ void relevance_prepare_read(struct relevance *rel, struct reclist *reclist)
                     rel->doc_frequency_vec[i]);
         }
     }
-    // Calculate relevance for each document
+    // Calculate relevance for each document (cluster)
     while (1)
     {
         int relevance = 0;
@@ -649,14 +693,15 @@ void relevance_prepare_read(struct relevance *rel, struct reclist *reclist)
         // Build the normalizing structures
         // List of (sub)records for each target
         setup_norm_record( rel, rec );
-
-        // TODO - Loop again, merge individual record scores into clusters
-        // Can I reset the reclist, or can I leave and enter without race conditions?
-
     } // cluster loop
 
     normalize_scores(rel);
-    
+
+    // TODO - Calculate the cluster scores from individual records
+    // At the moment the record scoring puts one of them in the cluster...
+    reclist_rewind(reclist);
+
     reclist_leave(reclist);
 
     xfree(idfvec);
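For reference, the fitting strategy in normalize_scores() is easier to follow outside the diff. The sketch below is a minimal, standalone illustration, not part of the patch: start from the closed-form guess a = 1/range, b = -min/range (so the smallest raw score maps to 0.0 and the largest to 1.0), then hill-climb a and b with growing and shrinking steps to reduce a sum-of-squares objective. objective(), score[], target[] and the file name are invented for the example, and the candidate selection is slightly simplified; the patch itself minimizes squaresum() over the client's norm_record list, whose target values are not shown in this hunk.

/* fitdemo.c (hypothetical) - build with: cc -o fitdemo fitdemo.c -lm */
#include <stdio.h>
#include <math.h>

/* sum of squared differences between the mapped scores and the targets */
static double objective(const double *score, const double *target, int n,
                        double a, double b)
{
    double sum = 0.0;
    int i;
    for (i = 0; i < n; i++)
    {
        double d = a * score[i] + b - target[i];
        sum += d * d;
    }
    return sum;
}

int main(void)
{
    double score[]  = { 2.0, 3.5, 5.0, 8.0 };   /* made-up raw scores        */
    double target[] = { 0.0, 0.3, 0.55, 1.0 };  /* desired normalized values */
    int n = 4, i, it;
    double min = score[0], max = score[0];
    double range, a, b, as, bs, chi;
    const double stepchange = 0.5;   /* same step growth/reduction as the patch */

    for (i = 1; i < n; i++)          /* find the raw range */
    {
        if (score[i] < min) min = score[i];
        if (score[i] > max) max = score[i];
    }
    range = max - min;
    a = 1.0 / range;                 /* initial guess: min -> 0.0, max -> 1.0 */
    b = -min / range;
    as = a / 10;                     /* initial step sizes */
    bs = fabs(b) / 10;
    chi = objective(score, target, n, a, b);

    for (it = 0; it < 1000; it++)    /* safeguard against non-convergence */
    {
        double aplus  = objective(score, target, n, a + as, b);
        double aminus = objective(score, target, n, a - as, b);
        double bplus  = objective(score, target, n, a, b + bs);
        double bminus = objective(score, target, n, a, b - bs);
        if (aplus < chi && aplus < aminus && aplus < bplus && aplus < bminus)
        { a += as; chi = aplus;  as *= (1.0 + stepchange); }
        else if (aminus < chi && aminus < bplus && aminus < bminus)
        { a -= as; chi = aminus; as *= (1.0 + stepchange); }
        else if (bplus < chi && bplus < bminus)
        { b += bs; chi = bplus;  bs *= (1.0 + stepchange); }
        else if (bminus < chi)
        { b -= bs; chi = bminus; bs *= (1.0 + stepchange); }
        else
        {   /* no probe improved chi: shrink the step that moves chi the most */
            double adif = fabs(0.5 * (aplus + aminus) - chi);
            double bdif = fabs(0.5 * (bplus + bminus) - chi);
            if (adif > bdif)
                as *= (1.0 - stepchange);
            else
                bs *= (1.0 - stepchange);
        }
        if (fabs(as) * 100.0 < fabs(a) && fabs(bs) * 100.0 < fabs(b))
            break;                   /* steps now tiny compared to a and b */
    }
    printf("fit after %d iterations: a=%g b=%g chi=%g\n", it, a, b, chi);
    return 0;
}

As in the patch, a successful move lengthens that parameter's step, while a round with no improvement shortens the step whose probes move the objective the most, which is what the 0.5 stepchange factor controls.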