This repository contains my bachelor's thesis and associated TeX files, code snippets, and possibly more. Topic: Data Movement in Heterogeneous Memories with Intel Data Streaming Accelerator
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

117 lines
3.2 KiB

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <limits>
#include <memory>
#include <random>
#include <vector>

#include "cache.hpp"
// Global cache instance shared by all accesses in this test program;
// configured in main() via CACHE.Init() before first use.
dsacache::Cache CACHE;
  5. double* GetRandomArray(const size_t size) {
  6. double* array = new double[size];
  7. std::uniform_real_distribution<double> unif(std::numeric_limits<double>::min(), std::numeric_limits<double>::max());
  8. std::default_random_engine re;
  9. for (size_t i = 0; i < size; i++) {
  10. array[i] = unif(re);
  11. }
  12. return array;
  13. }
  14. bool IsEqual(const double* a, const double* b, const size_t size) {
  15. for (size_t i = 0; i < size; i++) {
  16. try {
  17. if (a[i] != b[i]) return false;
  18. }
  19. catch (...) {
  20. return false;
  21. }
  22. }
  23. return true;
  24. }
  25. void PerformAccessAndTest(double* src, const size_t size) {
  26. std::unique_ptr<dsacache::CacheData> data_cache = CACHE.Access(
  27. reinterpret_cast<uint8_t *>(src),
  28. size * sizeof(double)
  29. );
  30. double* cached_imm = reinterpret_cast<double *>(data_cache->GetDataLocation());
  31. // check the value immediately just to see if ram or cache was returned
  32. if (src == cached_imm) {
  33. std::cout << "Caching did not immediately yield different data location." << std::endl;
  34. }
  35. else if (cached_imm == nullptr) {
  36. std::cout << "Immediately got nullptr." << std::endl;
  37. }
  38. else {
  39. std::cout << "Immediately got different data location." << std::endl;
  40. }
  41. // waits for the completion of the asynchronous caching operation
  42. data_cache->WaitOnCompletion();
  43. // gets the cache-data-location from the struct
  44. double* cached = reinterpret_cast<double *>(data_cache->GetDataLocation());
  45. // tests on the resulting value
  46. if (src == cached) {
  47. std::cout << "Caching did not affect data location." << std::endl;
  48. }
  49. else if (cached == nullptr) {
  50. std::cout << "Got nullptr from cache." << std::endl;
  51. }
  52. else {
  53. std::cout << "Got different data location from cache." << std::endl;
  54. }
  55. if (IsEqual(src,cached,size)) {
  56. std::cout << "Cached data is correct." << std::endl;
  57. }
  58. else {
  59. std::cout << "Cached data is wrong." << std::endl;
  60. }
  61. }
  62. int main(int argc, char **argv) {
  63. // given numa destination and source node and the size of the data
  64. // this function decides on which the data will be placed
  65. // which is used to select the HBM-node for the dst-node if desired
  66. auto cache_policy = [](const int numa_dst_node, const int numa_src_node, const size_t data_size) {
  67. return numa_dst_node;
  68. };
  69. // this function receives the memory source and destination node
  70. // and then decides, on which nodes the copy operation will be split
  71. auto copy_policy = [](const int numa_dst_node, const int numa_src_node) {
  72. return std::vector{ numa_src_node, numa_dst_node };
  73. };
  74. // initializes the cache with the two policies
  75. CACHE.Init(cache_policy,copy_policy);
  76. // generate the test data
  77. static constexpr size_t data_size = 1024 * 1024;
  78. double* data = GetRandomArray(data_size);
  79. std::cout << "--- first access --- " << std::endl;
  80. PerformAccessAndTest(data, data_size);
  81. std::cout << "--- second access --- " << std::endl;
  82. PerformAccessAndTest(data, data_size);
  83. std::cout << "--- end of application --- " << std::endl;
  84. }