/*
 * partialator.c
 *
 * Scaling and post refinement for coherent nanocrystallography
 *
 * (c) 2006-2011 Thomas White
 *
 * Part of CrystFEL - crystallography with a FEL
 *
 */


#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>
#include <getopt.h>

#include "utils.h"
#include "hdf5-file.h"
#include "symmetry.h"
#include "reflections.h"
#include "stream.h"
#include "geometry.h"
#include "peaks.h"
#include "thread-pool.h"
#include "beam-parameters.h"
#include "post-refinement.h"
#include "hrs-scaling.h"


/* Maximum number of iterations of NLSq to do for each image per macrocycle. */
#define MAX_CYCLES (100)


static void show_help(const char *s)
{
	printf("Syntax: %s [options]\n\n", s);
	printf(
"Scaling and post refinement for coherent nanocrystallography.\n"
"\n"
"  -h, --help                 Display this help message.\n"
"\n"
"  -i, --input=<filename>     Specify the name of the input 'stream'.\n"
"                              (must be a file, not e.g. stdin)\n"
"  -o, --output=<filename>    Output filename.  Default: facetron.hkl.\n"
"  -g, --geometry=<file>      Get detector geometry from file.\n"
"  -b, --beam=<file>          Get beam parameters from file, which provides\n"
"                              initial values for parameters, and nominal\n"
"                              wavelengths if no per-shot value is found in\n"
"                              an HDF5 file.\n"
"  -x, --prefix=<p>           Prefix filenames from input file with <p>.\n"
"      --basename             Remove the directory parts of the filenames.\n"
"      --no-check-prefix      Don't attempt to correct the --prefix.\n"
"  -y, --symmetry=<sym>       Merge according to symmetry <sym>.\n"
"  -n, --iterations=<n>       Run <n> cycles of scaling and post-refinement.\n"
"\n"
"  -j <n>                     Run <n> analyses in parallel.\n");
}


struct refine_args
{
	const char *sym;
	ReflItemList *obs;
	double *i_full;
	struct image *image;
	FILE *graph;
	FILE *pgraph;
};


static void refine_image(int mytask, void *tasks)
{
	struct refine_args *all_args = tasks;
	struct refine_args *pargs = &all_args[mytask];
	struct image *image = pargs->image;
	double nominal_photon_energy = pargs->image->beam->photon_energy;
	struct hdfile *hdfile;
	struct cpeak *spots;
	int n, i;
	double dev, last_dev;

	hdfile = hdfile_open(image->filename);
	if ( hdfile == NULL ) {
		ERROR("Couldn't open '%s'\n", image->filename);
		return;
	} else if ( hdfile_set_image(hdfile, "/data/data0") ) {
		ERROR("Couldn't select path\n");
		hdfile_close(hdfile);
		return;
	}

	if ( hdf5_read(hdfile, pargs->image, 0, nominal_photon_energy) ) {
		ERROR("Couldn't read '%s'\n", image->filename);
		hdfile_close(hdfile);
		return;
	}

	double a, b, c, al, be, ga;
	cell_get_parameters(image->indexed_cell, &a, &b, &c, &al, &be, &ga);
	STATUS("Initial cell: %5.2f %5.2f %5.2f nm %5.2f %5.2f %5.2f deg\n",
	       a/1.0e-9, b/1.0e-9, c/1.0e-9,
	       rad2deg(al), rad2deg(be), rad2deg(ga));

	spots = find_intersections(image, image->indexed_cell, &n, 0);

	/* Refine until the mean deviation stops improving significantly,
	 * or the cycle limit is reached */
	dev = +INFINITY;
	i = 0;
	do {
		last_dev = dev;
		dev = pr_iterate(image, pargs->i_full, pargs->sym, &spots, &n);
		STATUS("Iteration %2i: mean dev = %5.2f\n", i, dev);
		i++;
	} while ( (fabs(last_dev - dev) > 1.0) && (i < MAX_CYCLES) );

	mean_partial_dev(image, spots, n, pargs->sym, pargs->i_full,
	                 pargs->graph);
	if ( pargs->pgraph ) {
		fprintf(pargs->pgraph, "%5i %5.2f\n", mytask, dev);
	}

	free(image->data);
	if ( image->flags != NULL ) free(image->flags);
	hdfile_close(hdfile);
	free(spots);

	/* Muppet proofing */
	image->data = NULL;
	image->flags = NULL;
}


static void refine_all(struct image *images, int n_total_patterns,
                       struct detector *det, const char *sym,
                       ReflItemList *obs, double *i_full, int nthreads,
                       FILE *graph, FILE *pgraph)
{
	struct refine_args *tasks;
	int i;

	tasks = malloc(n_total_patterns * sizeof(struct refine_args));
	for ( i=0; i<n_total_patterns; i++ ) {
		tasks[i].sym = sym;
		tasks[i].obs = obs;
		tasks[i].i_full = i_full;
		tasks[i].image = &images[i];
		tasks[i].graph = graph;
		tasks[i].pgraph = pgraph;
	}

	/* Refine each pattern in parallel */
	run_thread_range(n_total_patterns, nthreads, "Refining",
	                 refine_image, tasks);

	free(tasks);
}


/* Map the indices of a spot into the asymmetric unit of the target group */
static void map_asymm(struct cpeak *spot, const char *sym)
{
	signed int ha, ka, la;

	get_asymm(spot->h, spot->k, spot->l, &ha, &ka, &la, sym);
	spot->h = ha;
	spot->k = ka;
	spot->l = la;
}


static void integrate_image(struct image *image, ReflItemList *obs,
                            const char *sym)
{
	struct cpeak *spots;
	int j, n;
	struct hdfile *hdfile;
	double nominal_photon_energy = image->beam->photon_energy;

	hdfile = hdfile_open(image->filename);
	if ( hdfile == NULL ) {
		ERROR("Couldn't open '%s'\n", image->filename);
		return;
	} else if ( hdfile_set_image(hdfile, "/data/data0") ) {
		ERROR("Couldn't select path\n");
		hdfile_close(hdfile);
		return;
	}

	if ( hdf5_read(hdfile, image, 0, nominal_photon_energy) ) {
		ERROR("Couldn't read '%s'\n", image->filename);
		hdfile_close(hdfile);
		return;
	}

	/* Figure out which spots should appear in this pattern */
	spots = find_intersections(image, image->indexed_cell, &n, 0);

	/* For each reflection, estimate the partiality */
	for ( j=0; j<n; j++ ) {

		/* Put the indices into the asymmetric unit */
		map_asymm(&spots[j], sym);

		/* Note that this unique reflection was observed */
		if ( !find_item(obs, spots[j].h, spots[j].k, spots[j].l) ) {
			add_item(obs, spots[j].h, spots[j].k, spots[j].l);
		}

	}

	image->cpeaks = spots;
	image->n_cpeaks = n;

	free(image->data);
	if ( image->flags != NULL ) free(image->flags);
	hdfile_close(hdfile);

	/* Muppet proofing */
	image->data = NULL;
	image->flags = NULL;
}


/* Decide which reflections can be scaled */
static void select_scalable_reflections(struct image *images, int n)
{
	int m;

	for ( m=0; m<n; m++ ) {

		int j;

		for ( j=0; j<images[m].n_cpeaks; j++ ) {

			int scalable = 1;

			/* Reject reflections which were barely recorded */
			if ( images[m].cpeaks[j].p < 0.1 ) scalable = 0;

			images[m].cpeaks[j].scalable = scalable;

		}

	}
}


int main(int argc, char *argv[])
{
	int c;
	char *infile = NULL;
	char *outfile = NULL;
	char *geomfile = NULL;
	char *prefix = NULL;
	char *sym = NULL;
	FILE *fh;
	int nthreads = 1;
	int config_basename = 0;
	int config_checkprefix = 1;
	struct detector *det;
	struct beam_params *beam = NULL;
	ReflItemList *obs;
	struct image *images;
	int n_total_patterns;
	int n_iter = 10;
	unsigned int *cts;
	double *I_full;
	int i;

	/* Long options */
	const struct option longopts[] = {
		{"help",               0, NULL,               'h'},
		{"input",              1, NULL,               'i'},
		{"output",             1, NULL,               'o'},
		{"geometry",           1, NULL,               'g'},
		{"beam",               1, NULL,               'b'},
		{"prefix",             1, NULL,               'x'},
		{"symmetry",           1, NULL,               'y'},
		{"iterations",         1, NULL,               'n'},
		{"basename",           0, &config_basename,    1},
		{"no-check-prefix",    0, &config_checkprefix, 0},
		{0, 0, NULL, 0}
	};

	/* Short options */
	while ( (c = getopt_long(argc, argv, "hi:o:g:b:x:y:n:j:",
	                         longopts, NULL)) != -1 )
	{
		switch (c) {
		case 'h' :
			show_help(argv[0]);
			return 0;
		case 'i' :
			infile = strdup(optarg);
			break;
		case 'o' :
			outfile = strdup(optarg);
			break;
		case 'g' :
			geomfile = strdup(optarg);
			break;
		case 'b' :
			beam = get_beam_parameters(optarg);
			if ( beam == NULL ) {
				ERROR("Failed to load beam parameters"
				      " from '%s'\n", optarg);
				return 1;
			}
			break;
		case 'x' :
			prefix = strdup(optarg);
			break;
		case 'y' :
			sym = strdup(optarg);
			break;
		case 'n' :
			n_iter = atoi(optarg);
			break;
		case 'j' :
			nthreads = atoi(optarg);
			break;
		case 0 :
			break;
		default :
			return 1;
		}
	}

	/* Sanity check the command-line arguments */
	if ( infile == NULL ) {
		ERROR("Please provide the input stream filename with -i.\n");
		return 1;
	}
	if ( outfile == NULL ) outfile = strdup("facetron.hkl");
	if ( sym == NULL ) sym = strdup("1");
	if ( prefix == NULL ) {
		prefix = strdup("");
	} else if ( config_checkprefix ) {
		prefix = check_prefix(prefix);
	}

	if ( geomfile == NULL ) {
		ERROR("Please provide a detector geometry file with -g.\n");
		return 1;
	}
	det = get_detector_geometry(geomfile);
	if ( det == NULL ) {
		ERROR("Failed to read geometry from '%s'\n", geomfile);
		return 1;
	}
	free(geomfile);

	if ( beam == NULL ) {
		ERROR("Please provide a beam parameters file with -b.\n");
		return 1;
	}

	fh = fopen(infile, "r");
	if ( fh == NULL ) {
		ERROR("Couldn't open input stream '%s'\n", infile);
		return 1;
	}
	free(infile);

	/* Count the patterns in the stream, then go back to the start */
	n_total_patterns = count_patterns(fh);
	rewind(fh);

	images = malloc(n_total_patterns * sizeof(struct image));
	if ( images == NULL ) {
		ERROR("Couldn't allocate memory for the patterns.\n");
		return 1;
	}

	obs = new_items();

	/* Fill in what we know about each pattern from the stream */
	for ( i=0; i<n_total_patterns; i++ ) {

		UnitCell *cell;
		char *filename;
		char *fnamereal;

		if ( find_chunk(fh, &cell, &filename) == 1 ) {
			ERROR("Couldn't get all of the filenames and cells"
			      " from the input stream.\n");
			return 1;
		}

		images[i].indexed_cell = cell;

		/* Mangle the filename now */
		if ( config_basename ) {
			char *tmp = safe_basename(filename);
			free(filename);
			filename = tmp;
		}
		fnamereal = malloc(1024);
		snprintf(fnamereal, 1023, "%s%s", prefix, filename);
		free(filename);
		images[i].filename = fnamereal;

		images[i].div = beam->divergence;
		images[i].bw = beam->bandwidth;
		images[i].det = det;
		images[i].beam = beam;
		images[i].osf = 1.0;
		images[i].profile_radius = 0.005e9;

		/* Muppet proofing */
		images[i].data = NULL;
		images[i].flags = NULL;

		/* Get reflections from this image.
		 * FIXME: Use the ones from the stream */
		integrate_image(&images[i], obs, sym);

		progress_bar(i, n_total_patterns-1, "Loading pattern data");

	}
	fclose(fh);
	free(prefix);

	cts = new_list_count();

	/* Make initial estimates */
	STATUS("Performing initial scaling.\n");
	select_scalable_reflections(images, n_total_patterns);
	I_full = scale_intensities(images, n_total_patterns, sym, obs);

	/* Iterate */
	for ( i=0; i<n_iter; i++ ) {

		/* Refine the geometrical parameters of each pattern against
		 * the current full intensities, then re-scale */
		refine_all(images, n_total_patterns, det, sym, obs, I_full,
		           nthreads, NULL, NULL);

		select_scalable_reflections(images, n_total_patterns);
		free(I_full);
		I_full = scale_intensities(images, n_total_patterns, sym, obs);

	}

	/* Write out the final merged intensities */
	write_reflections(outfile, obs, I_full, NULL, cts, NULL);

	/* Clean up */
	free(det->panels);
	free(det);
	free(beam);
	free(cts);
	for ( i=0; i<n_total_patterns; i++ ) {
		cell_free(images[i].indexed_cell);
		free(images[i].filename);
		free(images[i].cpeaks);
	}
	free(images);
	free(I_full);
	free(outfile);
	free(sym);

	return 0;
}
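
/*
 * Example invocation (illustrative sketch only).  The filenames and the
 * point group below are hypothetical; they merely exercise the switches
 * documented in show_help() above and do not refer to any real dataset:
 *
 *   partialator -i patterns.stream -g detector.geom -b beam.params \
 *               -y 6/mmm -n 3 -j 8 -o merged.hkl
 */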