#ifndef XTENSOR_SLICE_HPP
#define XTENSOR_SLICE_HPP

#include <xtl/xtype_traits.hpp>

#include "xstorage.hpp"
#include "xtensor_config.hpp"
#ifndef XTENSOR_CONSTEXPR
#if (defined(_MSC_VER) || __GNUC__ < 8)
#define XTENSOR_CONSTEXPR inline
#define XTENSOR_GLOBAL_CONSTEXPR static const
#else
#define XTENSOR_CONSTEXPR constexpr
#define XTENSOR_GLOBAL_CONSTEXPR constexpr
#endif
#endif
template <class S>
using is_xslice = std::is_base_of<xslice<S>, S>;

template <class E, class R = void>
using disable_xslice = typename std::enable_if<!is_xslice<E>::value, R>::type;

template <class... E>
using has_xslice = xtl::disjunction<is_xslice<E>...>;
#define DEFINE_TAG_CONVERSION(NAME)                 \
    XTENSOR_CONSTEXPR NAME convert() const noexcept \
    {                                               \
        return NAME();                              \
    }

#undef DEFINE_TAG_CONVERSION
size_type operator()(size_type i) const noexcept;

size_type size() const noexcept;
size_type step_size() const noexcept;
size_type step_size(std::size_t i, std::size_t n = 1) const noexcept;
size_type revert_index(std::size_t i) const noexcept;

bool contains(size_type i) const noexcept;
size_type operator()(size_type i) const noexcept;

size_type size() const noexcept;
size_type step_size() const noexcept;
size_type step_size(std::size_t i, std::size_t n = 1) const noexcept;
size_type revert_index(std::size_t i) const noexcept;

bool contains(size_type i) const noexcept;
explicit xall(size_type size) noexcept;

operator xall<S>() const noexcept;

xall<S> convert() const noexcept;

size_type operator()(size_type i) const noexcept;

size_type size() const noexcept;
size_type step_size() const noexcept;
size_type step_size(std::size_t i, std::size_t n = 1) const noexcept;
size_type revert_index(std::size_t i) const noexcept;

bool contains(size_type i) const noexcept;
size_type operator()(size_type i) const noexcept;

size_type size() const noexcept;
size_type step_size() const noexcept;
size_type step_size(std::size_t i, std::size_t n = 1) const noexcept;
size_type revert_index(std::size_t i) const noexcept;

bool contains(size_type i) const noexcept;
template <class T>
struct is_xkeep_slice : std::false_type
{
};

template <class T>
struct is_xkeep_slice<xkeep_slice<T>> : std::true_type
{
};

template <class T>
using disable_xkeep_slice_t = std::enable_if_t<!is_xkeep_slice<std::decay_t<T>>::value, void>;

template <class T>
using enable_xkeep_slice_t = std::enable_if_t<is_xkeep_slice<std::decay_t<T>>::value, void>;
using size_type = typename container_type::value_type;

template <class C, typename = detail::disable_xkeep_slice_t<C>>
explicit xkeep_slice(C& cont);
size_type operator()(size_type i) const noexcept;
size_type size() const noexcept;

void normalize(std::size_t s);

size_type step_size(std::size_t i, std::size_t n = 1) const noexcept;
size_type revert_index(std::size_t i) const;

bool contains(size_type i) const noexcept;
template <class T>
using disable_integral_keep = std::enable_if_t<
    !xtl::is_integral<std::decay_t<T>>::value,
    xkeep_slice<typename std::decay_t<T>::value_type>>;

template <class T, class R>
using enable_integral_keep = std::enable_if_t<xtl::is_integral<T>::value, xkeep_slice<R>>;
template <class R = std::ptrdiff_t, class T>
inline detail::enable_integral_keep<T, R> keep(T i)
{
    using slice_type = xkeep_slice<R>;
    using container_type = typename slice_type::container_type;
    container_type tmp = {static_cast<R>(i)};
    return slice_type(std::move(tmp));
}
template <class R = std::ptrdiff_t, class Arg0, class Arg1, class... Args>
inline xkeep_slice<R> keep(Arg0 i0, Arg1 i1, Args... args)
{
    using slice_type = xkeep_slice<R>;
    using container_type = typename slice_type::container_type;
    container_type tmp = {static_cast<R>(i0), static_cast<R>(i1), static_cast<R>(args)...};
    return slice_type(std::move(tmp));
}
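// Illustrative usage of the keep() factories above -- a minimal sketch assuming
// xt::xarray, xt::arange and xt::view from xtensor (not defined in this header):
//
//     xt::xarray<int> a = xt::arange<int>(5);      // {0, 1, 2, 3, 4}
//     auto v = xt::view(a, xt::keep(0, 2, 4));     // selects {0, 2, 4}
//     auto w = xt::view(a, xt::keep(-1));          // negative indices count from the end: {4}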
template <class T>
struct is_xdrop_slice : std::false_type
{
};

template <class T>
struct is_xdrop_slice<xdrop_slice<T>> : std::true_type
{
};

template <class T>
using disable_xdrop_slice_t = std::enable_if_t<!is_xdrop_slice<std::decay_t<T>>::value, void>;

template <class T>
using enable_xdrop_slice_t = std::enable_if_t<is_xdrop_slice<std::decay_t<T>>::value, void>;
using size_type = typename container_type::value_type;

template <class C, typename = detail::disable_xdrop_slice_t<C>>
explicit xdrop_slice(C& cont);
size_type operator()(size_type i) const noexcept;
size_type size() const noexcept;

void normalize(std::size_t s);

size_type step_size(std::size_t i, std::size_t n = 1) const noexcept;
size_type revert_index(std::size_t i) const;

bool contains(size_type i) const noexcept;

std::map<size_type, size_type> m_inc;
template <class T>
using disable_integral_drop = std::enable_if_t<
    !xtl::is_integral<std::decay_t<T>>::value,
    xdrop_slice<typename std::decay_t<T>::value_type>>;

template <class T, class R>
using enable_integral_drop = std::enable_if_t<xtl::is_integral<T>::value, xdrop_slice<R>>;
template <class R = std::ptrdiff_t, class T>
inline detail::enable_integral_drop<T, R> drop(T i)
{
    using slice_type = xdrop_slice<R>;
    using container_type = typename slice_type::container_type;
    container_type tmp = {static_cast<R>(i)};
    return slice_type(std::move(tmp));
}
template <class R = std::ptrdiff_t, class Arg0, class Arg1, class... Args>
inline xdrop_slice<R> drop(Arg0 i0, Arg1 i1, Args... args)
{
    using slice_type = xdrop_slice<R>;
    using container_type = typename slice_type::container_type;
    container_type tmp = {static_cast<R>(i0), static_cast<R>(i1), static_cast<R>(args)...};
    return slice_type(std::move(tmp));
}
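// Illustrative usage of the drop() factories above -- a minimal sketch assuming
// xt::xarray, xt::arange and xt::view from xtensor (not defined in this header):
//
//     xt::xarray<int> a = xt::arange<int>(5);   // {0, 1, 2, 3, 4}
//     auto v = xt::view(a, xt::drop(1, 3));     // keeps everything but indices 1 and 3: {0, 2, 4}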
template <class A, class B = A, class C = A>
class xrange_adaptor
template <class MI = A, class MA = B, class STEP = C>
inline std::enable_if_t<
    xtl::is_integral<MI>::value && xtl::is_integral<MA>::value && xtl::is_integral<STEP>::value,
    xstepped_range<std::ptrdiff_t>>
get(std::size_t size) const
{
    return get_stepped_range(m_start, m_stop, m_step, size);
}
template <class MI = A, class MA = B, class STEP = C>
inline std::enable_if_t<
    !xtl::is_integral<MI>::value && xtl::is_integral<MA>::value && xtl::is_integral<STEP>::value,
    xstepped_range<std::ptrdiff_t>>
get(std::size_t size) const
{
    return get_stepped_range(m_step > 0 ? 0 : static_cast<std::ptrdiff_t>(size) - 1, m_stop, m_step, size);
}
template <class MI = A, class MA = B, class STEP = C>
inline std::enable_if_t<
    xtl::is_integral<MI>::value && !xtl::is_integral<MA>::value && xtl::is_integral<STEP>::value,
    xstepped_range<std::ptrdiff_t>>
get(std::size_t size) const
{
    auto sz = static_cast<std::ptrdiff_t>(size);
    return get_stepped_range(m_start, m_step > 0 ? sz : -(sz + 1), m_step, size);
}
template <class MI = A, class MA = B, class STEP = C>
inline std::enable_if_t<
    xtl::is_integral<MI>::value && xtl::is_integral<MA>::value && !xtl::is_integral<STEP>::value,
    xrange<std::ptrdiff_t>>
get(std::size_t size) const
template <class MI = A, class MA = B, class STEP = C>
inline std::enable_if_t<
    !xtl::is_integral<MI>::value && !xtl::is_integral<MA>::value && xtl::is_integral<STEP>::value,
    xstepped_range<std::ptrdiff_t>>
get(std::size_t size) const
{
    std::ptrdiff_t start = m_step >= 0 ? 0 : static_cast<std::ptrdiff_t>(size) - 1;
    std::ptrdiff_t stop = m_step >= 0 ? static_cast<std::ptrdiff_t>(size) : -1;
    return get_stepped_range(start, stop, m_step, size);
}
template <class MI = A, class MA = B, class STEP = C>
inline std::enable_if_t<
    xtl::is_integral<MI>::value && !xtl::is_integral<MA>::value && !xtl::is_integral<STEP>::value,
    xrange<std::ptrdiff_t>>
get(std::size_t size) const

template <class MI = A, class MA = B, class STEP = C>
inline std::enable_if_t<
    !xtl::is_integral<MI>::value && xtl::is_integral<MA>::value && !xtl::is_integral<STEP>::value,
    xrange<std::ptrdiff_t>>
get(std::size_t size) const

template <class MI = A, class MA = B, class STEP = C>
inline std::enable_if_t<
    !xtl::is_integral<MI>::value && !xtl::is_integral<MA>::value && !xtl::is_integral<STEP>::value,
    xrange<std::ptrdiff_t>>
get(std::size_t size) const
static auto normalize(std::ptrdiff_t val, std::size_t ssize)
{
    std::ptrdiff_t size = static_cast<std::ptrdiff_t>(ssize);
    val = (val >= 0) ? val : val + size;
    return (std::max)(std::ptrdiff_t(0), (std::min)(size, val));
}
static auto get_stepped_range(std::ptrdiff_t start, std::ptrdiff_t stop, std::ptrdiff_t step, std::size_t ssize)
{
    std::ptrdiff_t size = static_cast<std::ptrdiff_t>(ssize);
    start = (start >= 0) ? start : start + size;
    stop = (stop >= 0) ? stop : stop + size;
    if (step > 0)
    {
        start = (std::max)(std::ptrdiff_t(0), (std::min)(size, start));
        stop = (std::max)(std::ptrdiff_t(0), (std::min)(size, stop));
    }
    else
    {
        start = (std::max)(std::ptrdiff_t(-1), (std::min)(size - 1, start));
        stop = (std::max)(std::ptrdiff_t(-1), (std::min)(size - 1, stop));
    }
    return xstepped_range<std::ptrdiff_t>(start, stop, step);
}
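// Worked examples for the normalization above (illustrative values):
//
//     get_stepped_range(-3, 10, 1, 10);   // start wraps to -3 + 10 = 7, both bounds clamped to [0, 10]
//                                         // -> xstepped_range(7, 10, 1): indices 7, 8, 9
//     get_stepped_range(8, -11, -2, 10);  // stop wraps to -1; with a negative step, bounds clamp to [-1, 9]
//                                         // -> xstepped_range(8, -1, -2): indices 8, 6, 4, 2, 0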
namespace placeholders

template <class... Args>
struct rangemaker
{
    std::ptrdiff_t rng[3];
};
XTENSOR_CONSTEXPR xtuph get_tuph_or_val(std::ptrdiff_t, std::true_type)
{
    return xtuph();
}

XTENSOR_CONSTEXPR std::ptrdiff_t get_tuph_or_val(std::ptrdiff_t val, std::false_type)
{
    return val;
}
template <class A, class B, class C>
struct rangemaker<A, B, C>
{
    XTENSOR_CONSTEXPR operator xrange_adaptor<A, B, C>()
    {
        return xrange_adaptor<A, B, C>(
            {get_tuph_or_val(rng[0], std::is_same<A, xtuph>()),
             get_tuph_or_val(rng[1], std::is_same<B, xtuph>()),
             get_tuph_or_val(rng[2], std::is_same<C, xtuph>())}
        );
    }

    std::ptrdiff_t rng[3];
};

template <class A, class B>
struct rangemaker<A, B>
{
    XTENSOR_CONSTEXPR operator xrange_adaptor<A, B, xtuph>()
    {
        return xrange_adaptor<A, B, xtuph>(
            {get_tuph_or_val(rng[0], std::is_same<A, xtuph>()),
             get_tuph_or_val(rng[1], std::is_same<B, xtuph>()),
             xtuph()}
        );
    }

    std::ptrdiff_t rng[3];
};
template <class... OA>
XTENSOR_CONSTEXPR auto operator|(const rangemaker<OA...>& rng, const std::ptrdiff_t& t)
{
    auto nrng = rangemaker<OA..., std::ptrdiff_t>({rng.rng[0], rng.rng[1], rng.rng[2]});
    nrng.rng[sizeof...(OA)] = t;
    return nrng;
}
template <class... OA>

XTENSOR_GLOBAL_CONSTEXPR xtuph _{};
XTENSOR_GLOBAL_CONSTEXPR rangemaker<> _r = rangemaker<>({0, 0, 0});
XTENSOR_GLOBAL_CONSTEXPR xall_tag _a{};
XTENSOR_GLOBAL_CONSTEXPR xnewaxis_tag _n{};
XTENSOR_GLOBAL_CONSTEXPR xellipsis_tag _e{};
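// Illustrative use of the placeholders above -- a minimal sketch assuming
// xt::xarray, xt::arange and xt::view from xtensor (not part of this header):
//
//     using namespace xt::placeholders;
//     xt::xarray<int> a = xt::arange<int>(10);
//     auto v = xt::view(a, xt::range(_, 5));   // unspecified start: indices 0..4
//     auto w = xt::view(a, xt::range(2, _));   // unspecified stop: indices 2..9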
return placeholders::xtuph();
template <class T, class E = void>
struct cast_if_integer
{
    using type = T;
    type operator()(T t) { return t; }
};

template <class T>
struct cast_if_integer<T, std::enable_if_t<xtl::is_integral<T>::value>>
{
    using type = std::ptrdiff_t;
    type operator()(T t) { return static_cast<type>(t); }
};

template <class T>
using cast_if_integer_t = typename cast_if_integer<T>::type;
template <class A, class B>
inline auto range(A start_val, B stop_val)
{
    return xrange_adaptor<detail::cast_if_integer_t<A>, detail::cast_if_integer_t<B>, placeholders::xtuph>(
        detail::cast_if_integer<A>{}(start_val),
        detail::cast_if_integer<B>{}(stop_val),
        placeholders::xtuph()
    );
}

template <class A, class B, class C>
inline auto range(A start_val, B stop_val, C step)
{
    return xrange_adaptor<detail::cast_if_integer_t<A>, detail::cast_if_integer_t<B>, detail::cast_if_integer_t<C>>(
        detail::cast_if_integer<A>{}(start_val),
        detail::cast_if_integer<B>{}(stop_val),
        detail::cast_if_integer<C>{}(step)
    );
}
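// Illustrative usage of range() -- a minimal sketch assuming xt::xarray,
// xt::arange and xt::view from xtensor (not defined in this header):
//
//     using namespace xt::placeholders;            // for the _ placeholder
//     xt::xarray<int> a = xt::arange<int>(10);
//     auto v = xt::view(a, xt::range(1, 7));       // indices 1..6 (stop is excluded)
//     auto w = xt::view(a, xt::range(0, 10, 3));   // indices 0, 3, 6, 9
//     auto u = xt::view(a, xt::range(-3, _));      // negative start counts from the end: 7, 8, 9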
template <class S>
inline disable_xslice<S, std::size_t> get_size(const S&) noexcept
{
    return 1;
}

template <class S>
inline auto get_size(const xslice<S>& slice) noexcept
{
    return slice.derived_cast().size();
}

template <class S>
inline disable_xslice<S, std::size_t> step_size(const S&, std::size_t) noexcept
{
    return 0;
}

template <class S>
inline disable_xslice<S, std::size_t> step_size(const S&, std::size_t, std::size_t) noexcept
{
    return 0;
}

template <class S>
inline auto step_size(const xslice<S>& slice, std::size_t idx) noexcept
{
    return slice.derived_cast().step_size(idx);
}

template <class S>
inline auto step_size(const xslice<S>& slice, std::size_t idx, std::size_t n) noexcept
{
    return slice.derived_cast().step_size(idx, n);
}
template <class S, class I>
inline disable_xslice<S, std::size_t> value(const S& s, I) noexcept
{
    return static_cast<std::size_t>(s);
}

template <class S, class I>
inline auto value(const xslice<S>& slice, I i) noexcept
{
    using ST = typename S::size_type;
    return slice.derived_cast()(static_cast<ST>(i));
}
struct slice_implementation_getter
{
    template <class E, class SL>
    inline decltype(auto) operator()(E& e, SL&& slice, std::size_t index) const
    {
        return get_slice(e, std::forward<SL>(slice), index, xtl::is_signed<std::decay_t<SL>>());
    }

    template <class E, class SL>
    inline decltype(auto) get_slice(E&, SL&& slice, std::size_t, std::false_type) const
    {
        return std::forward<SL>(slice);
    }

    template <class E, class SL>
    inline decltype(auto) get_slice(E& e, SL&& slice, std::size_t index, std::true_type) const
    {
        using int_type = std::decay_t<SL>;
        return slice < int_type(0) ? slice + static_cast<std::ptrdiff_t>(e.shape(index)) : std::ptrdiff_t(slice);
    }
};
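// Worked example (illustrative): for an expression whose axis `index` has shape 10,
// a signed scalar slice of -2 is remapped by get_slice above to -2 + 10 = 8, while a
// non-negative value such as 3 is forwarded unchanged. Assuming xt::xarray and xt::view:
//
//     xt::xarray<int> a = xt::arange<int>(10);
//     xt::view(a, -2);   // equivalent to xt::view(a, 8)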
struct keep_drop_getter
{
    template <class E, class SL>
    inline decltype(auto) operator()(E& e, SL&& slice, std::size_t index) const
    {
        slice.normalize(e.shape()[index]);
        return std::forward<SL>(slice);
    }

    template <class E, class SL>
    inline auto operator()(E& e, const SL& slice, std::size_t index) const
    {
        return this->operator()(e, SL(slice), index);
    }
};

template <class T>
struct slice_implementation_getter<xkeep_slice<T>> : keep_drop_getter
{
};

template <class T>
struct slice_implementation_getter<xdrop_slice<T>> : keep_drop_getter
{
};
template <>
struct slice_implementation_getter<xall_tag>
{
    template <class E, class SL>
    inline auto operator()(E& e, SL&&, std::size_t index) const
    {
        return xall<typename E::size_type>(e.shape()[index]);
    }
};

template <>
struct slice_implementation_getter<xnewaxis_tag>
{
    template <class E, class SL>
    inline auto operator()(E&, SL&&, std::size_t) const
    {
        return xnewaxis<typename E::size_type>();
    }
};

template <class A, class B, class C>
struct slice_implementation_getter<xrange_adaptor<A, B, C>>
{
    template <class E, class SL>
    inline auto operator()(E& e, SL&& adaptor, std::size_t index) const
    {
        return adaptor.get(e.shape()[index]);
    }
};

template <class E, class SL>
inline auto get_slice_implementation(E& e, SL&& slice, std::size_t index)
{
    detail::slice_implementation_getter<std::decay_t<SL>> getter;
    return getter(e, std::forward<SL>(slice), index);
}
template <class E, class SL>
struct get_slice_type_impl
{
    using type = SL;
};

template <class E>
struct get_slice_type_impl<E, xall_tag>
{
    using type = xall<typename E::size_type>;
};

template <class E>
struct get_slice_type_impl<E, xnewaxis_tag>
{
    using type = xnewaxis<typename E::size_type>;
};

template <class E, class A, class B, class C>
struct get_slice_type_impl<E, xrange_adaptor<A, B, C>>
{
    using type = decltype(xrange_adaptor<A, B, C>(A(), B(), C()).get(0));
};

template <class E, class SL>
using get_slice_type = typename detail::get_slice_type_impl<E, std::remove_reference_t<SL>>::type;
template <class D>
inline auto xslice<D>::derived_cast() noexcept -> derived_type&
{
    return *static_cast<derived_type*>(this);
}

template <class D>
inline auto xslice<D>::derived_cast() const noexcept -> const derived_type&
{
    return *static_cast<const derived_type*>(this);
}
template <class T>
inline xrange<T>::xrange(size_type start_val, size_type stop_val) noexcept
    : m_start(start_val)
    , m_size(stop_val > start_val ? stop_val - start_val : 0)
{
}
template <class T>
template <class S, typename>
inline xrange<T>::operator xrange<S>() const noexcept
{
    xrange<S> ret;
    ret.m_start = static_cast<S>(m_start);
    ret.m_size = static_cast<S>(m_size);
    return ret;
}

template <class T>
template <class S, typename>
inline xrange<S> xrange<T>::convert() const noexcept
{
    return xrange<S>(*this);
}
template <class T>
inline auto xrange<T>::operator()(size_type i) const noexcept -> size_type
{
    return m_start + i;
}

template <class T>
inline auto xrange<T>::size() const noexcept -> size_type
{
    return m_size;
}

template <class T>
inline auto xrange<T>::step_size() const noexcept -> size_type
{
    return size_type(1);
}

template <class T>
inline auto xrange<T>::step_size(std::size_t, std::size_t n) const noexcept -> size_type
{
    return static_cast<size_type>(n);
}

template <class T>
inline auto xrange<T>::revert_index(std::size_t i) const noexcept -> size_type
{
    return i - m_start;
}

template <class T>
inline bool xrange<T>::contains(size_type i) const noexcept
{
    return i >= m_start && i < m_start + m_size;
}

template <class T>
inline bool xrange<T>::operator==(const self_type& rhs) const noexcept
{
    return (m_start == rhs.m_start) && (m_size == rhs.m_size);
}

template <class T>
inline bool xrange<T>::operator!=(const self_type& rhs) const noexcept
{
    return !(*this == rhs);
}
template <class T>
inline xstepped_range<T>::xstepped_range(size_type start_val, size_type stop_val, size_type step) noexcept
    : m_start(start_val)
    , m_size(size_type(0))
    , m_step(step)
{
    size_type n = stop_val - start_val;
    m_size = n / step + (((n < 0) ^ (step > 0)) && (n % step));
}
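// Worked example of the size formula above (illustrative values):
// xstepped_range(0, 10, 3): n = 10, n / step = 3, and the partial step adds 1
// because n % step != 0 and n, step have the same sign -> m_size = 4 (indices 0, 3, 6, 9).
// xstepped_range(8, -1, -2): n = -9, n / step = 4, plus 1 for the remainder
// -> m_size = 5 (indices 8, 6, 4, 2, 0).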
template <class T>
template <class S, typename>
inline xstepped_range<T>::operator xstepped_range<S>() const noexcept
{
    xstepped_range<S> ret;
    ret.m_start = static_cast<S>(m_start);
    ret.m_size = static_cast<S>(m_size);
    ret.m_step = static_cast<S>(m_step);
    return ret;
}

template <class T>
template <class S, typename>
inline xstepped_range<S> xstepped_range<T>::convert() const noexcept
{
    return xstepped_range<S>(*this);
}

template <class T>
inline auto xstepped_range<T>::operator()(size_type i) const noexcept -> size_type
{
    return m_start + i * m_step;
}

template <class T>
inline auto xstepped_range<T>::size() const noexcept -> size_type
{
    return m_size;
}

template <class T>
inline auto xstepped_range<T>::step_size() const noexcept -> size_type
{
    return m_step;
}

template <class T>
inline auto xstepped_range<T>::step_size(std::size_t, std::size_t n) const noexcept -> size_type
{
    return m_step * static_cast<size_type>(n);
}

template <class T>
inline auto xstepped_range<T>::revert_index(std::size_t i) const noexcept -> size_type
{
    return (i - m_start) / m_step;
}

template <class T>
inline bool xstepped_range<T>::contains(size_type i) const noexcept
{
    return i >= m_start && i < m_start + m_size * m_step && ((i - m_start) % m_step == 0);
}

template <class T>
inline bool xstepped_range<T>::operator==(const self_type& rhs) const noexcept
{
    return (m_start == rhs.m_start) && (m_size == rhs.m_size) && (m_step == rhs.m_step);
}

template <class T>
inline bool xstepped_range<T>::operator!=(const self_type& rhs) const noexcept
{
    return !(*this == rhs);
}
template <class T>
inline xall<T>::xall(size_type size) noexcept
    : m_size(size)
{
}

template <class T>
template <class S, typename>
inline xall<T>::operator xall<S>() const noexcept
{
    return xall<S>(static_cast<S>(m_size));
}

template <class T>
template <class S, typename>
inline xall<S> xall<T>::convert() const noexcept
{
    return xall<S>(*this);
}

template <class T>
inline auto xall<T>::operator()(size_type i) const noexcept -> size_type
{
    return i;
}

template <class T>
inline auto xall<T>::size() const noexcept -> size_type
{
    return m_size;
}

template <class T>
inline auto xall<T>::step_size() const noexcept -> size_type
{
    return size_type(1);
}

template <class T>
inline auto xall<T>::step_size(std::size_t, std::size_t n) const noexcept -> size_type
{
    return static_cast<size_type>(n);
}

template <class T>
inline auto xall<T>::revert_index(std::size_t i) const noexcept -> size_type
{
    return i;
}

template <class T>
inline bool xall<T>::contains(size_type i) const noexcept
{
    return i < m_size;
}

template <class T>
inline bool xall<T>::operator==(const self_type& rhs) const noexcept
{
    return m_size == rhs.m_size;
}

template <class T>
inline bool xall<T>::operator!=(const self_type& rhs) const noexcept
{
    return !(*this == rhs);
}
template <class T>
template <class S, typename>
inline xnewaxis<T>::operator xnewaxis<S>() const noexcept
{
    return xnewaxis<S>();
}

template <class T>
template <class S, typename>
inline xnewaxis<S> xnewaxis<T>::convert() const noexcept
{
    return xnewaxis<S>(*this);
}

template <class T>
inline auto xnewaxis<T>::operator()(size_type) const noexcept -> size_type
{
    return size_type(0);
}

template <class T>
inline auto xnewaxis<T>::size() const noexcept -> size_type
{
    return size_type(1);
}

template <class T>
inline auto xnewaxis<T>::step_size() const noexcept -> size_type

template <class T>
inline auto xnewaxis<T>::step_size(std::size_t, std::size_t) const noexcept -> size_type

template <class T>
inline auto xnewaxis<T>::revert_index(std::size_t i) const noexcept -> size_type

template <class T>
inline bool xnewaxis<T>::contains(size_type i) const noexcept

template <class T>
inline bool xnewaxis<T>::operator==(const self_type&) const noexcept
{
    return true;
}

template <class T>
inline bool xnewaxis<T>::operator!=(const self_type&) const noexcept
{
    return false;
}
template <class T>
template <class C, typename>
inline xkeep_slice<T>::xkeep_slice(C& cont)
    : m_raw_indices(cont.begin(), cont.end())
{
}

template <class T>
inline xkeep_slice<T>::xkeep_slice(container_type&& cont)
    : m_raw_indices(std::move(cont))
{
}
template <class T>
template <class S>
inline xkeep_slice<T>::xkeep_slice(std::initializer_list<S> t)
    : m_raw_indices(t.size())
{
    std::transform(t.begin(), t.end(), m_raw_indices.begin(),
                   [](auto t) { return static_cast<size_type>(t); });
}
template <class T>
template <class S, typename>
inline xkeep_slice<T>::operator xkeep_slice<S>() const noexcept
{
    xkeep_slice<S> ret;
    using us_type = typename container_type::size_type;
    us_type sz = static_cast<us_type>(size());
    ret.m_raw_indices.resize(sz);
    ret.m_indices.resize(sz);
    std::transform(m_raw_indices.cbegin(), m_raw_indices.cend(), ret.m_raw_indices.begin(),
                   [](auto val) { return static_cast<S>(val); });
    std::transform(m_indices.cbegin(), m_indices.cend(), ret.m_indices.begin(),
                   [](auto val) { return static_cast<S>(val); });
    return ret;
}
template <class T>
template <class S, typename>
inline xkeep_slice<S> xkeep_slice<T>::convert() const noexcept
{
    return xkeep_slice<S>(*this);
}

template <class T>
inline void xkeep_slice<T>::normalize(std::size_t shape)
{
    m_indices.resize(m_raw_indices.size());
    std::size_t sz = m_indices.size();
    for (std::size_t i = 0; i < sz; ++i)
    {
        m_indices[i] = m_raw_indices[i] < 0 ? static_cast<size_type>(shape) + m_raw_indices[i]
                                            : m_raw_indices[i];
    }
}
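// Worked example (illustrative): keep(-1, 1) normalized against an axis of shape 5
// yields m_indices = {4, 1}; non-negative raw indices are copied through unchanged.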
template <class T>
inline auto xkeep_slice<T>::operator()(size_type i) const noexcept -> size_type
{
    return m_indices.size() == size_type(1) ? m_indices.front() : m_indices[static_cast<std::size_t>(i)];
}

template <class T>
inline auto xkeep_slice<T>::size() const noexcept -> size_type
{
    return static_cast<size_type>(m_raw_indices.size());
}

template <class T>
inline auto xkeep_slice<T>::step_size(std::size_t i, std::size_t n) const noexcept -> size_type
{
    if (m_indices.size() == 1)

    if (i + n >= m_indices.size())
    {
        return m_indices.back() - m_indices[i] + 1;
    }
    else
    {
        return m_indices[i + n] - m_indices[i];
    }
}
template <class T>
inline auto xkeep_slice<T>::revert_index(std::size_t i) const -> size_type
{
    auto it = std::find(m_indices.begin(), m_indices.end(), i);
    if (it != m_indices.end())
    {
        return std::distance(m_indices.begin(), it);
    }
    else
    {
        XTENSOR_THROW(std::runtime_error, "Index i (" + std::to_string(i) + ") not in indices of islice.");
    }
}

template <class T>
inline bool xkeep_slice<T>::contains(size_type i) const noexcept
{
    return (std::find(m_indices.begin(), m_indices.end(), i) == m_indices.end()) ? false : true;
}

template <class T>
inline bool xkeep_slice<T>::operator==(const self_type& rhs) const noexcept
{
    return m_indices == rhs.m_indices;
}

template <class T>
inline bool xkeep_slice<T>::operator!=(const self_type& rhs) const noexcept
{
    return !(*this == rhs);
}
template <class T>
template <class C, typename>
inline xdrop_slice<T>::xdrop_slice(C& cont)
    : m_raw_indices(cont.begin(), cont.end())
{
}

template <class T>
inline xdrop_slice<T>::xdrop_slice(container_type&& cont)
    : m_raw_indices(std::move(cont))
{
}
template <class T>
template <class S>
inline xdrop_slice<T>::xdrop_slice(std::initializer_list<S> t)
    : m_raw_indices(t.size())
{
    std::transform(t.begin(), t.end(), m_raw_indices.begin(),
                   [](auto t) { return static_cast<size_type>(t); });
}
template <class T>
template <class S, typename>
inline xdrop_slice<T>::operator xdrop_slice<S>() const noexcept
{
    xdrop_slice<S> ret;
    ret.m_raw_indices.resize(m_raw_indices.size());
    ret.m_indices.resize(m_indices.size());
    std::transform(m_raw_indices.cbegin(), m_raw_indices.cend(), ret.m_raw_indices.begin(),
                   [](auto val) { return static_cast<S>(val); });
    std::transform(m_indices.cbegin(), m_indices.cend(), ret.m_indices.begin(),
                   [](auto val) { return static_cast<S>(val); });
    std::transform(m_inc.cbegin(), m_inc.cend(), std::inserter(ret.m_inc, ret.m_inc.begin()),
                   [](const auto& val) { return std::make_pair(static_cast<S>(val.first), static_cast<S>(val.second)); });
    ret.m_size = static_cast<S>(m_size);
    return ret;
}
template <class T>
template <class S, typename>
inline xdrop_slice<S> xdrop_slice<T>::convert() const noexcept
{
    return xdrop_slice<S>(*this);
}

template <class T>
inline void xdrop_slice<T>::normalize(std::size_t shape)
{
    m_size = static_cast<size_type>(shape - m_raw_indices.size());

    m_indices.resize(m_raw_indices.size());
    std::size_t sz = m_indices.size();
    for (std::size_t i = 0; i < sz; ++i)
    {
        m_indices[i] = m_raw_indices[i] < 0 ? static_cast<size_type>(shape) + m_raw_indices[i]
                                            : m_raw_indices[i];
    }

    size_type cum = size_type(0);
    size_type prev_cum = cum;
    for (std::size_t i = 0; i < sz; ++i)
    {
        std::size_t ind = i;
        size_type d = m_indices[i];
        while (i + 1 < sz && m_indices[i + 1] == m_indices[i] + 1)
        {
            ++i;
        }
        cum += (static_cast<size_type>(i) - static_cast<size_type>(ind)) + 1;
        m_inc[d - prev_cum] = cum;
        prev_cum = cum;
    }
}
template <class T>
inline auto xdrop_slice<T>::operator()(size_type i) const noexcept -> size_type
{
    if (m_inc.empty() || i < m_inc.begin()->first)
    {
        return i;
    }
    auto iter = --m_inc.upper_bound(i);
    return i + iter->second;
}
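// Worked example (illustrative): drop(2, 3, 7) on an axis of shape 10 keeps the
// underlying indices {0, 1, 4, 5, 6, 8, 9}. normalize() records the dropped runs as
// m_inc = {{2, 2}, {5, 3}}: output positions >= 2 are shifted by 2, positions >= 5 by 3.
// Hence operator()(1) == 1, operator()(4) == 4 + 2 == 6, and operator()(5) == 5 + 3 == 8.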
template <class T>
inline auto xdrop_slice<T>::size() const noexcept -> size_type
{
    return m_size;
}

template <class T>
inline auto xdrop_slice<T>::step_size(std::size_t i, std::size_t n) const noexcept -> size_type
{
    if (i + n >= static_cast<std::size_t>(m_size))
    {
        return (*this)(static_cast<size_type>(m_size - 1)) - (*this)(static_cast<size_type>(i)) + 1;
    }
    else
    {
        return (*this)(static_cast<size_type>(i + n)) - (*this)(static_cast<size_type>(i));
    }
}

template <class T>
inline auto xdrop_slice<T>::revert_index(std::size_t i) const -> size_type
{
    if (i < m_inc.begin()->first)
    {
        return i;
    }
    else
    {
        auto iter = --m_inc.lower_bound(i);
        auto check = iter->first + iter->second;

        return i - iter->second;
    }
}

template <class T>
inline bool xdrop_slice<T>::contains(size_type i) const noexcept
{
    return (std::find(m_indices.begin(), m_indices.end(), i) == m_indices.end()) ? true : false;
}

template <class T>
inline bool xdrop_slice<T>::operator==(const self_type& rhs) const noexcept
{
    return m_indices == rhs.m_indices;
}

template <class T>
inline bool xdrop_slice<T>::operator!=(const self_type& rhs) const noexcept
{
    return !(*this == rhs);
}
#undef XTENSOR_CONSTEXPR

#endif