HiRep 0.1
Functions needed for the new geometry implementation that will replace the current geometry in the future.
#include "spinor_field.h"
#include "suN.h"
#include "suN_types.h"
#include <stdint.h>
#include <stddef.h>
Data Structures

struct coord4
struct box_t

Macros

#define _DECLARE_SYNC_TO_BUFFER(_name, _field_type, _type)
#define _DECLARE_SYNC_TO_BUFFER_REDUCED(_name, _field_type, _type)

Typedefs

typedef struct coord4 coord4
typedef struct box_t box_t

Enumerations

enum box_type { L2 = 2, L3 = 3, INNER = 4, SENDBUF = 5 }

Functions
void define_geometry(void)
void *sendbuf_alloc(size_t bytes_per_site)
void *sendbuf_alloc_gpu(size_t bytes_per_site)
void sync_field(geometry_descriptor *gd, size_t byte_per_site, int is_spinor_like, void *latticebuf, void *sb_ptr)
int test_define_geometry(void)
void sendbuf_report(void)
void sync_field_gpu(geometry_descriptor *, int, int, void *, void *)
int boxEvenVolume(box_t *B)
int boxOddVolume(box_t *B)
int boxVolume(box_t *)
void geometryMemSize(box_t *G, size_t *total, size_t *buffers)
    Compute memory size for total = inner + buffers, and buffers only.
void sync_box_to_buffer_gpu_spinor_field(geometry_descriptor *, box_t *, spinor_field *, void *)
void sync_box_to_buffer_gpu_spinor_field_flt(geometry_descriptor *, box_t *, spinor_field_flt *, void *)
void sync_box_to_buffer_gpu_reduced_spinor_field(geometry_descriptor *, box_t *, spinor_field *, void *, int, char)
void sync_box_reduced_init_spinor_field(geometry_descriptor *, box_t *, spinor_field *, void *, int)
void sync_box_to_buffer_gpu_reduced_spinor_field_flt(geometry_descriptor *, box_t *, spinor_field_flt *, void *, int, char)
void sync_box_reduced_init_spinor_field_flt(geometry_descriptor *, box_t *, spinor_field_flt *, void *, int)
void sync_box_to_buffer_gpu_scalar_field(geometry_descriptor *, box_t *, scalar_field *, void *)
void sync_box_to_buffer_gpu_suNg_field(geometry_descriptor *, box_t *, suNg_field *, void *)
void sync_box_to_buffer_gpu_suNf_field(geometry_descriptor *, box_t *, suNf_field *, void *)
void sync_box_to_buffer_gpu_suNg_field_flt(geometry_descriptor *, box_t *, suNg_field_flt *, void *)
void sync_box_to_buffer_gpu_suNf_field_flt(geometry_descriptor *, box_t *, suNf_field_flt *, void *)
void sync_box_to_buffer_gpu_suNg_scalar_field(geometry_descriptor *, box_t *, suNg_scalar_field *, void *)
void sync_box_to_buffer_gpu_suNg_av_field(geometry_descriptor *, box_t *, suNg_av_field *, void *)
void sync_box_to_buffer_gpu_gtransf(geometry_descriptor *, box_t *, gtransf *, void *)
void sync_box_to_buffer_gpu_ldl_field(geometry_descriptor *, box_t *, ldl_field *, void *)
void sync_box_to_buffer_gpu_clover_term(geometry_descriptor *, box_t *, clover_term *, void *)
void sync_box_to_buffer_gpu_clover_force(geometry_descriptor *, box_t *, clover_force *, void *)
void sync_box_to_buffer_gpu_staple_field(geometry_descriptor *, box_t *, staple_field *, void *)
Detailed Description

Functions needed for the new geometry implementation that will replace the current geometry in the future.

Header file for:
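As a rough illustration of how the entry points above fit together, here is a minimal, hypothetical call sequence (a sketch only; in particular, the assumption that test_define_geometry() returns 0 on success is mine and is not stated on this page):

    #include <stdio.h>
    /* plus the header documented on this page */

    /* Hypothetical driver: build the new geometry, run its self-test and
     * print the send-buffer report. */
    int setup_new_geometry(void) {
        define_geometry(); /* set up the new geometry */

        if (test_define_geometry() != 0) { /* assumed: nonzero signals failure */
            printf("new geometry self-test failed\n");
            return 1;
        }

        sendbuf_report(); /* print information on the shared send buffers */
        return 0;
    }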
#define _DECLARE_SYNC_TO_BUFFER(_name, _field_type, _type)

#define _DECLARE_SYNC_TO_BUFFER_REDUCED(_name, _field_type, _type)
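The macro bodies are not reproduced on this page. Judging purely from the list of sync_box_to_buffer_gpu_* declarations above, a plausible (unverified) expansion pattern is sketched below; the _REDUCED variant carries the extra (int, char) arguments seen in sync_box_to_buffer_gpu_reduced_spinor_field:

    /* Sketch only: an expansion pattern inferred from the declarations listed
     * above, NOT the actual macro bodies in this header. */
    #define _DECLARE_SYNC_TO_BUFFER(_name, _field_type, _type) \
        void sync_box_to_buffer_gpu_##_name(geometry_descriptor *, box_t *, _field_type *, void *);

    #define _DECLARE_SYNC_TO_BUFFER_REDUCED(_name, _field_type, _type) \
        void sync_box_to_buffer_gpu_reduced_##_name(geometry_descriptor *, box_t *, _field_type *, void *, int, char);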
typedef struct box_t box_t
A box spans the sites between its lower corner l and its upper corner h. NB: the corner h[4] is not in the box, i.e. coordinates need to satisfy l[] <= p[] < h[].
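To make the l[] / h[] convention concrete, the sketch below shows how the total volume of such a box could be computed, in the spirit of what boxVolume() presumably returns; the struct and its member names are illustrative only and need not match the real box_t definition:

    /* Illustrative stand-in for box_t: inclusive lower corner l[4],
     * exclusive upper corner h[4]. Member names are assumptions. */
    typedef struct {
        int l[4]; /* lower corner, contained in the box     */
        int h[4]; /* upper corner, NOT contained in the box */
    } example_box_t;

    /* Number of sites p with l[mu] <= p[mu] < h[mu] in every direction mu. */
    static int example_box_volume(const example_box_t *B) {
        int vol = 1;
        for (int mu = 0; mu < 4; mu++) {
            vol *= B->h[mu] - B->l[mu];
        }
        return vol;
    }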
void geometryMemSize(box_t *G, size_t *total, size_t *buffers)
Compute the memory size needed for the whole field, total = inner + buffers, and for the buffers alone.
Lattice memory layout is as follows:

    |<- index 0 (EVEN)
    | EVEN LOCAL VOL | EVEN L3 #1 | ... | EVEN L3 #n |
    | ODD LOCAL VOL  | ODD L3 #1  | ... | ODD L3 #n  |

and, only for the gauge (glattice):

    | EVEN L2 #1 | ... | EVEN L2 #m |
    | ODD L2 #1  | ... | ODD L2 #m  |
Send buffers live outside the fields; they are organized similarly:

    | SBUF EVEN L3 #1 | ... | SBUF EVEN L3 #n |
    | SBUF ODD L3 #1  | ... | SBUF ODD L3 #n  |
    | SBUF EVEN L2 #1 | ... | SBUF EVEN L2 #m |
    | SBUF ODD L2 #1  | ... | SBUF ODD L2 #m  |
Send buffers are shared among all fields of the same size; they are always allocated as glattice, i.e. they contain all L3 and L2 buffers. TODO: change this: make sendbuf_alloc take the full size of the send buffer, not the size per site.
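Putting the pieces of this documentation block together, the following hedged sketch sizes a field with geometryMemSize, obtains a shared send buffer with sendbuf_alloc (per site, as the TODO above notes), and fills it with sync_field. Everything here is illustrative: the descriptor gd and the box are assumed to exist already, and geometryMemSize is assumed to report site counts rather than bytes, which this page does not state.

    #include <stdlib.h>

    /* Sketch only: gd/box describe an already defined geometry; each site of
     * the field occupies bytes_per_site bytes. */
    void example_sync(geometry_descriptor *gd, box_t *box, size_t bytes_per_site, int is_spinor_like) {
        size_t total = 0, buffers = 0;
        geometryMemSize(box, &total, &buffers); /* total = inner + buffers (assumed to be site counts) */

        /* Field storage covers the inner volume plus the receive buffers,
         * laid out even block first, then odd block, as described above. */
        void *field = malloc(total * bytes_per_site);

        /* Shared send buffer, sized per site (see TODO above); not freed here,
         * as it is assumed to be owned by the geometry module. */
        void *sendbuf = sendbuf_alloc(bytes_per_site);

        /* Presumably gathers the boundary sites of the field into the send buffer. */
        sync_field(gd, bytes_per_site, is_spinor_like, field, sendbuf);

        free(field);
    }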