author     Thomas White <taw@physics.org>    2011-07-04 13:21:36 +0200
committer  Thomas White <taw@physics.org>    2012-02-22 15:27:31 +0100
commit     b67429762f02d906fdc3ab14da4577c958937679 (patch)
tree       5480a78070ce1e319ea15674ff61255dbc77e7d9 /src/partialator.c
parent     b7928ad9b90c45ff227bb6124c3a365283f11db4 (diff)
Don't select scalable reflections during post refinement
Diffstat (limited to 'src/partialator.c')
-rw-r--r--  src/partialator.c  |  60
1 file changed, 58 insertions, 2 deletions
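
Note: the change below adds a select_scalable_reflections() pass. A reflection is marked scalable only when its partiality is at least 0.1 and the absolute value of its measured intensity is at least 0.1; accepted observations are counted and their (asymmetric) indices recorded in the ReflItemList, and the same pass is re-run over every pattern after refine_all() so that the scalable set follows the refined partialities. The standalone sketch below illustrates only that acceptance test on a simplified reflection record; the obs_refl struct, its field names and the CUTOFF constant are illustrative stand-ins, not the CrystFEL Reflection/RefList API the patch actually uses.

#include <math.h>
#include <stdio.h>

/* Illustrative stand-in for one observed reflection.  The real patch
 * operates on CrystFEL's Reflection/RefList types via get_partiality(),
 * get_intensity() and set_scalable(). */
struct obs_refl
{
        signed int h, k, l;   /* asymmetric-unit indices */
        double partiality;    /* modelled partiality */
        double intensity;     /* measured (partial) intensity */
        int scalable;         /* result of the selection pass */
};

/* Hypothetical cutoff matching the 0.1 thresholds used in the patch */
static const double CUTOFF = 0.1;

/* Apply the same acceptance test as select_scalable_reflections():
 * reject reflections with very low partiality or near-zero intensity.
 * Returns the number of observations accepted as scalable. */
static int select_scalable(struct obs_refl *refl, int n)
{
        int i;
        int nobs = 0;

        for ( i=0; i<n; i++ ) {

                int scalable = 1;

                if ( refl[i].partiality < CUTOFF ) scalable = 0;
                if ( fabs(refl[i].intensity) < CUTOFF ) scalable = 0;

                refl[i].scalable = scalable;
                if ( scalable ) nobs++;

        }

        return nobs;
}

int main(void)
{
        struct obs_refl obs[3] = {
                { 1, 2, 3, 0.85, 120.00, 0 },  /* accepted */
                { 2, 0, 1, 0.05,  40.00, 0 },  /* rejected: partiality < 0.1 */
                { 0, 1, 1, 0.60,   0.02, 0 },  /* rejected: |intensity| < 0.1 */
        };

        printf("%i of 3 observations are scalable\n",
               select_scalable(obs, 3));

        return 0;
}

As in the patch, the count of accepted observations (nobs) is reset and re-accumulated across all usable patterns whenever the selection is repeated, so the reported mean measurements per scalable unique reflection stays consistent with the current selection.
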
diff --git a/src/partialator.c b/src/partialator.c
index 367d3a6e..167f736d 100644
--- a/src/partialator.c
+++ b/src/partialator.c
@@ -151,6 +151,46 @@ static void refine_all(struct image *images, int n_total_patterns,
 }
+/* Decide which reflections can be scaled */
+static int select_scalable_reflections(RefList *list, ReflItemList *sc_l)
+{
+        Reflection *refl;
+        RefListIterator *iter;
+        int nobs = 0;
+
+        for ( refl = first_refl(list, &iter);
+              refl != NULL;
+              refl = next_refl(refl, iter) ) {
+
+                int scalable = 1;
+                double v;
+
+                if ( get_partiality(refl) < 0.1 ) scalable = 0;
+                v = fabs(get_intensity(refl));
+                if ( v < 0.1 ) scalable = 0;
+                set_scalable(refl, scalable);
+
+                if ( scalable ) {
+
+                        signed int h, k, l;
+
+                        nobs++;
+
+                        /* Add (asymmetric) indices to list */
+                        get_indices(refl, &h, &k, &l);
+
+                        if ( !find_item(sc_l, h, k, l) ) {
+                                add_item(sc_l, h, k, l);
+                        }
+
+                }
+
+        }
+
+        return nobs;
+}
+
+
 int main(int argc, char *argv[])
 {
         int c;
@@ -173,6 +213,7 @@ int main(int argc, char *argv[])
         int n_notfound = 0;
         char *cref;
         int n_usable_patterns = 0;
+        int nobs;
         char *reference_file = NULL;
         double *reference = NULL;
         RefList *reference_list = NULL;
@@ -313,6 +354,7 @@ int main(int argc, char *argv[])
         /* Fill in what we know about the images so far */
         rewind(fh);
         scalable = new_items();
+        nobs = 0;
         for ( i=0; i<n_total_patterns; i++ ) {
                 RefList *as;
@@ -355,9 +397,11 @@ int main(int argc, char *argv[])
                 reflist_free(cur->reflections);
                 cur->reflections = as;
-                update_partialities(cur, sym, scalable,
+                update_partialities(cur, sym,
                                     &n_expected, &n_found, &n_notfound);
+                nobs += select_scalable_reflections(cur->reflections, scalable);
+
                 progress_bar(i, n_total_patterns-1, "Loading pattern data");
                 n_usable_patterns++;
@@ -366,7 +410,7 @@ int main(int argc, char *argv[])
         STATUS("Found %5.2f%% of the expected peaks (missed %i of %i).\n",
                100.0 * (double)n_found / n_expected, n_notfound, n_expected);
         STATUS("Mean measurements per scalable unique reflection: %5.2f\n",
-               (double)n_found / num_items(scalable));
+               (double)nobs / num_items(scalable));
         cref = find_common_reflections(images, n_usable_patterns);
@@ -438,6 +482,18 @@ int main(int argc, char *argv[])
                 refine_all(images, n_usable_patterns, det, sym, scalable,
                            reference_list, nthreads, fhg, fhp);
+                nobs = 0;
+                clear_items(scalable);
+                for ( i=0; i<n_usable_patterns; i++ ) {
+
+                        struct image *cur = &images[i];
+                        nobs += select_scalable_reflections(cur->reflections,
+                                                            scalable);
+
+                }
+                STATUS("Mean measurements per scalable unique "
+                       "reflection: %5.2f\n", (double)nobs/num_items(scalable));
+
                 /* Re-estimate all the full intensities */
                 reflist_free(full);
                 full = scale_intensities(images, n_usable_patterns,