| 11 | 11 | #include <executorch/runtime/core/memory_allocator.h> | 
| 12 | 12 | #include <sys/stat.h> | 
| 13 | 13 | #include <xnnpack.h> | 
|  | 14 | +#include <exception> | 
|  | 15 | +#include <memory> | 
|  | 16 | +#include <new> | 
| 14 | 17 | #include <string> | 
| 15 | 18 | #include <vector> | 
| 16 | 19 |  | 
| @@ -155,21 +158,45 @@ size_t XNNWeightsCache::look_up( | 
| 155 | 158 |   return packed_weight_entry->second.offset; | 
| 156 | 159 | } | 
| 157 | 160 |  | 
|  | 161 | +/** | 
|  | 162 | + * Reserve space in the weight cache for n bytes of weight data, aligned to | 
|  | 163 | + * context->kPackedAllocationAlignment. This function will return nullptr if | 
|  | 164 | + * the allocation fails. | 
|  | 165 | + */ | 
| 158 | 166 | void* XNNWeightsCache::reserve_space(XNNWeightsCache* context, size_t n) { | 
| 159 | 167 |   // MemoryAllocator* allocator = context->runtime_allocator_; | 
| 160 | 168 |   // void* reserved_pointer = allocator->allocate(n, | 
| 161 | 169 |   // context->kPackedAllocationAlignment); | 
| 162 | 170 |  | 
| 163 | 171 |   // return reserved_pointer; | 
| 164 |  | -  std::string data_container; | 
| 165 |  | -  data_container.resize(n + context->kPackedAllocationAlignment); | 
| 166 |  | -  void* maybe_aligned_space = data_container.data(); | 
| 167 |  | -  void* aligned_space = (void*)((intptr_t)maybe_aligned_space + 64 - | 
| 168 |  | -                                (intptr_t)maybe_aligned_space % 64); | 
| 169 |  | - | 
| 170 |  | -  context->packed_pointer_to_container_[aligned_space] = | 
| 171 |  | -      std::move(data_container); | 
| 172 |  | -  return aligned_space; | 
|  | 172 | +  try { | 
|  | 173 | +    std::string data_container; | 
|  | 174 | +    size_t raw_allocation_size = n + context->kPackedAllocationAlignment - 1; | 
|  | 175 | +    data_container.resize(raw_allocation_size); | 
|  | 176 | + | 
|  | 177 | +    void* maybe_aligned_space = data_container.data(); | 
|  | 178 | +    void* aligned_space = std::align( | 
|  | 179 | +        context->kPackedAllocationAlignment, | 
|  | 180 | +        n, | 
|  | 181 | +        maybe_aligned_space, | 
|  | 182 | +        raw_allocation_size // Note that std::align mutates this value. | 
|  | 183 | +    ); | 
|  | 184 | +    ET_CHECK_MSG(aligned_space != nullptr, "Memory alignment failed."); | 
|  | 185 | + | 
|  | 186 | +    context->packed_pointer_to_container_[aligned_space] = | 
|  | 187 | +        std::move(data_container); | 
|  | 188 | +    return aligned_space; | 
|  | 189 | +  } catch (std::bad_alloc& e) { | 
|  | 190 | +    // XNNPACK can gracefully handle allocation failures, so return nullptr. | 
|  | 191 | +    // We want to be able to recover from a failed attempt to load a large | 
|  | 192 | +    // model without a crash. | 
|  | 193 | +    ET_LOG( | 
|  | 194 | +        Error, | 
|  | 195 | +        "XNN weight cache failed to allocate %zu bytes: %s.", | 
|  | 196 | +        n, | 
|  | 197 | +        e.what()); | 
|  | 198 | +    return nullptr; | 
|  | 199 | +  } | 
| 173 | 200 | } | 
| 174 | 201 |  | 
| 175 | 202 | size_t XNNWeightsCache::look_up_or_insert( | 
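
For reference, here is a minimal, standalone sketch of the pattern the new `reserve_space` uses: over-allocate a `std::string` by `alignment - 1` bytes, let `std::align` locate the first aligned address inside it, keep the backing string alive in a map keyed by the aligned pointer, and translate `std::bad_alloc` into a `nullptr` return so a too-large model fails to load instead of aborting. The names (`reserve_aligned`, `owned_buffers`) are illustrative only; the map stands in for `packed_pointer_to_container_`, and `std::align` replaces the hand-rolled `+ 64 - ptr % 64` arithmetic from the old code.

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <memory>   // std::align
#include <new>      // std::bad_alloc
#include <string>
#include <unordered_map>

// Owns the raw buffers, keyed by the aligned pointer handed back to the caller.
static std::unordered_map<void*, std::string> owned_buffers;

void* reserve_aligned(std::size_t n, std::size_t alignment) {
  try {
    std::size_t space = n + alignment - 1;  // worst-case slack for alignment
    std::string buffer;
    buffer.resize(space);                   // may throw std::bad_alloc

    void* ptr = buffer.data();
    // std::align advances `ptr` to the first suitably aligned address and
    // shrinks `space` by the bytes skipped; it returns nullptr only if n
    // aligned bytes do not fit, which the slack above rules out.
    void* aligned = std::align(alignment, n, ptr, space);
    if (aligned == nullptr) {
      return nullptr;
    }

    // The buffer (>= alignment bytes) is above typical SSO thresholds, so its
    // heap storage, and therefore `aligned`, stays valid across the move.
    owned_buffers[aligned] = std::move(buffer);
    return aligned;
  } catch (const std::bad_alloc&) {
    // Report failure to the caller instead of terminating, mirroring the
    // graceful-failure path in the diff above.
    return nullptr;
  }
}

int main() {
  void* p = reserve_aligned(1024, 64);
  std::printf("got %p, 64-byte aligned: %d\n", p,
              p != nullptr &&
                  reinterpret_cast<std::uintptr_t>(p) % 64 == 0);
}
```
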