Low-Level Abstraction of Memory Access
Array.hpp File Reference
#include "macros.hpp"
#include <ostream>
#include <stdexcept>
#include <tuple>


Classes

struct  llama::Array< T, N >
 
struct  llama::Array< T, 0 >
 
struct  std::tuple_size< llama::Array< T, N > >
 
struct  std::tuple_element< I, llama::Array< T, N > >
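
The std::tuple_size and std::tuple_element specializations above expose llama::Array through the standard tuple interface. A minimal sketch of what they provide, assuming the header is reachable as "llama/Array.hpp" (the include path is not part of this reference):

#include "llama/Array.hpp" // assumed include path

#include <type_traits>

using A3 = llama::Array<int, 3>;

// tuple_size reports the number of elements, tuple_element their type
static_assert(std::tuple_size<A3>::value == 3);
static_assert(std::is_same_v<std::tuple_element<0, A3>::type, int>);

// If llama::Array also provides a get<I>() accessor (not shown in this
// summary), these specializations enable structured bindings such as
//   auto [x, y, z] = A3{1, 2, 3};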
 

Namespaces

 llama
 

Functions

template<typename First , typename... Args>
 llama::Array (First, Args... args) -> Array< First, sizeof...(Args)+1 >
 
template<typename T , std::size_t N>
auto llama::operator<< (std::ostream &os, const Array< T, N > &a) -> std::ostream &
 
template<typename T , std::size_t N>
constexpr auto llama::pushFront ([[maybe_unused]] Array< T, N > a, T v) -> Array< T, N+1 >
 
template<typename T , std::size_t N>
constexpr auto llama::pushBack ([[maybe_unused]] Array< T, N > a, T v) -> Array< T, N+1 >
 
template<typename T , std::size_t N>
constexpr auto llama::popBack ([[maybe_unused]] Array< T, N > a)
 
template<typename T , std::size_t N>
constexpr auto llama::popFront ([[maybe_unused]] Array< T, N > a)
 
template<typename T , std::size_t N>
constexpr auto llama::product (Array< T, N > a) -> T
 
template<typename T , std::size_t N>
constexpr auto llama::dot ([[maybe_unused]] Array< T, N > a, [[maybe_unused]] Array< T, N > b) -> T
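
A minimal usage sketch for the free functions listed above. The include path and the concrete element values are illustrative, and the element ordering assumed for pushFront/pushBack/popFront/popBack follows the usual meaning of those names; only the signatures are taken from this header:

#include "llama/Array.hpp" // assumed include path

#include <iostream>

auto main() -> int
{
    // the deduction guide above picks Array<int, 3> from the arguments
    constexpr llama::Array a{2, 3, 4};

    constexpr auto b = llama::pushFront(a, 1); // Array<int, 4>, assumed {1, 2, 3, 4}
    constexpr auto c = llama::pushBack(a, 5);  // Array<int, 4>, assumed {2, 3, 4, 5}
    constexpr auto d = llama::popFront(b);     // back to 3 elements
    constexpr auto e = llama::popBack(c);

    constexpr auto p = llama::product(a); // assumed 2 * 3 * 4 == 24
    constexpr auto s = llama::dot(a, a);  // assumed 2*2 + 3*3 + 4*4 == 29

    // llama::operator<< streams the arrays; the scalar results use the standard inserters
    std::cout << b << ' ' << c << ' ' << d << ' ' << e << '\n';
    std::cout << a << ' ' << p << ' ' << s << '\n';
    return 0;
}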