xtensor
xslice.hpp
1/***************************************************************************
2 * Copyright (c) Johan Mabille, Sylvain Corlay and Wolf Vollprecht *
3 * Copyright (c) QuantStack *
4 * *
5 * Distributed under the terms of the BSD 3-Clause License. *
6 * *
7 * The full license is in the file LICENSE, distributed with this software. *
8 ****************************************************************************/
9
10#ifndef XTENSOR_SLICE_HPP
11#define XTENSOR_SLICE_HPP
12
13#include <cstddef>
14#include <map>
15#include <type_traits>
16#include <utility>
17
18#include <xtl/xtype_traits.hpp>
19
20#include "xstorage.hpp"
21#include "xtensor_config.hpp"
22#include "xutils.hpp"
23
24#ifndef XTENSOR_CONSTEXPR
25#if (defined(_MSC_VER) || __GNUC__ < 8)
26#define XTENSOR_CONSTEXPR inline
27#define XTENSOR_GLOBAL_CONSTEXPR static const
28#else
29#define XTENSOR_CONSTEXPR constexpr
30#define XTENSOR_GLOBAL_CONSTEXPR constexpr
31#endif
32#endif
33
34namespace xt
35{
36
37 /**********************
38 * xslice declaration *
39 **********************/
40
41 template <class D>
42 class xslice
43 {
44 public:
45
46 using derived_type = D;
47
48 derived_type& derived_cast() noexcept;
49 const derived_type& derived_cast() const noexcept;
50
51 protected:
52
53 xslice() = default;
54 ~xslice() = default;
55
56 xslice(const xslice&) = default;
57 xslice& operator=(const xslice&) = default;
58
59 xslice(xslice&&) = default;
60 xslice& operator=(xslice&&) = default;
61 };
62
63 template <class S>
64 using is_xslice = std::is_base_of<xslice<S>, S>;
65
66 template <class E, class R = void>
67 using disable_xslice = typename std::enable_if<!is_xslice<E>::value, R>::type;
68
69 template <class... E>
70 using has_xslice = xtl::disjunction<is_xslice<E>...>;
71
72 /**************
73 * slice tags *
74 **************/
75
76#define DEFINE_TAG_CONVERSION(NAME) \
77 template <class T> \
78 XTENSOR_CONSTEXPR NAME convert() const noexcept \
79 { \
80 return NAME(); \
81 }
82
83 struct xall_tag
84 {
85 DEFINE_TAG_CONVERSION(xall_tag)
86 };
87
88 struct xnewaxis_tag
89 {
90 DEFINE_TAG_CONVERSION(xnewaxis_tag)
91 };
92
93 struct xellipsis_tag
94 {
95 DEFINE_TAG_CONVERSION(xellipsis_tag)
96 };
97
98#undef DEFINE_TAG_CONVERSION
99
100 /**********************
101 * xrange declaration *
102 **********************/
103
104 template <class T>
105 class xrange : public xslice<xrange<T>>
106 {
107 public:
108
109 using size_type = T;
110 using self_type = xrange<T>;
111
112 xrange() = default;
113 xrange(size_type start_val, size_type stop_val) noexcept;
114
115 template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, int>>
116 operator xrange<S>() const noexcept;
117
118 // Same as implicit conversion operator but more convenient to call
119 // from a variant visitor
120 template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, int>>
121 xrange<S> convert() const noexcept;
122
123 size_type operator()(size_type i) const noexcept;
124
125 size_type size() const noexcept;
126 size_type step_size() const noexcept;
127 size_type step_size(std::size_t i, std::size_t n = 1) const noexcept;
128 size_type revert_index(std::size_t i) const noexcept;
129
130 bool contains(size_type i) const noexcept;
131
132 bool operator==(const self_type& rhs) const noexcept;
133 bool operator!=(const self_type& rhs) const noexcept;
134
135 private:
136
137 size_type m_start;
138 size_type m_size;
139
140 template <class S>
141 friend class xrange;
142 };
143
144 /******************************
145 * xstepped_range declaration *
146 ******************************/
147
148 template <class T>
149 class xstepped_range : public xslice<xstepped_range<T>>
150 {
151 public:
152
153 using size_type = T;
154 using self_type = xstepped_range<T>;
155
156 xstepped_range() = default;
157 xstepped_range(size_type start_val, size_type stop_val, size_type step) noexcept;
158
159 template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, int>>
160 operator xstepped_range<S>() const noexcept;
161
162 // Same as implicit conversion operator but more convenient to call
163 // from a variant visitor
164 template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, int>>
165 xstepped_range<S> convert() const noexcept;
166
167 size_type operator()(size_type i) const noexcept;
168
169 size_type size() const noexcept;
170 size_type step_size() const noexcept;
171 size_type step_size(std::size_t i, std::size_t n = 1) const noexcept;
172 size_type revert_index(std::size_t i) const noexcept;
173
174 bool contains(size_type i) const noexcept;
175
176 bool operator==(const self_type& rhs) const noexcept;
177 bool operator!=(const self_type& rhs) const noexcept;
178
179 private:
180
181 size_type m_start;
182 size_type m_size;
183 size_type m_step;
184
185 template <class S>
186 friend class xstepped_range;
187 };
188
189 /********************
190 * xall declaration *
191 ********************/
192
193 template <class T>
194 class xall : public xslice<xall<T>>
195 {
196 public:
197
198 using size_type = T;
199 using self_type = xall<T>;
200
201 xall() = default;
202 explicit xall(size_type size) noexcept;
203
204 template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, int>>
205 operator xall<S>() const noexcept;
206
207 // Same as implicit conversion operator but more convenient to call
208 // from a variant visitor
209 template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, int>>
210 xall<S> convert() const noexcept;
211
212 size_type operator()(size_type i) const noexcept;
213
214 size_type size() const noexcept;
215 size_type step_size() const noexcept;
216 size_type step_size(std::size_t i, std::size_t n = 1) const noexcept;
217 size_type revert_index(std::size_t i) const noexcept;
218
219 bool contains(size_type i) const noexcept;
220
221 bool operator==(const self_type& rhs) const noexcept;
222 bool operator!=(const self_type& rhs) const noexcept;
223
224 private:
225
226 size_type m_size;
227 };
228
234 inline auto all() noexcept
235 {
236 return xall_tag();
237 }
238
255 inline auto ellipsis() noexcept
256 {
257 return xellipsis_tag();
258 }
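// Usage sketch for all() and ellipsis(), a minimal example that is not part of this
// header: it assumes xt::xarray (<xtensor/xarray.hpp>) and xt::view (<xtensor/xview.hpp>)
// from the rest of the library.
//
//     xt::xarray<int> a = {{1, 2, 3}, {4, 5, 6}};
//     auto col0 = xt::view(a, xt::all(), 0);        // first column: {1, 4}
//     auto row1 = xt::view(a, 1, xt::ellipsis());   // second row: {4, 5, 6}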
259
260 /************************
261 * xnewaxis declaration *
262 ************************/
263
264 template <class T>
265 class xnewaxis : public xslice<xnewaxis<T>>
266 {
267 public:
268
269 using size_type = T;
270 using self_type = xnewaxis<T>;
271
272 xnewaxis() = default;
273
274 template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, int>>
275 operator xnewaxis<S>() const noexcept;
276
277 // Same as implicit conversion operator but more convenient to call
278 // from a variant visitor
279 template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, int>>
280 xnewaxis<S> convert() const noexcept;
281
282 size_type operator()(size_type i) const noexcept;
283
284 size_type size() const noexcept;
285 size_type step_size() const noexcept;
286 size_type step_size(std::size_t i, std::size_t n = 1) const noexcept;
287 size_type revert_index(std::size_t i) const noexcept;
288
289 bool contains(size_type i) const noexcept;
290
291 bool operator==(const self_type& rhs) const noexcept;
292 bool operator!=(const self_type& rhs) const noexcept;
293 };
294
300 inline auto newaxis() noexcept
301 {
302 return xnewaxis_tag();
303 }
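// Usage sketch for newaxis(), a minimal example that is not part of this header; it
// assumes xt::xarray and xt::view from the rest of the library.
//
//     xt::xarray<double> v = {1., 2., 3.};                // shape (3)
//     auto col = xt::view(v, xt::all(), xt::newaxis());   // shape (3, 1)
//     auto row = xt::view(v, xt::newaxis(), xt::all());   // shape (1, 3)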
304
305 /***************************
306 * xkeep_slice declaration *
307 ***************************/
308
309 template <class T>
310 class xkeep_slice;
311
312 namespace detail
313 {
314 template <class T>
315 struct is_xkeep_slice : std::false_type
316 {
317 };
318
319 template <class T>
320 struct is_xkeep_slice<xkeep_slice<T>> : std::true_type
321 {
322 };
323
324 template <class T>
325 using disable_xkeep_slice_t = std::enable_if_t<!is_xkeep_slice<std::decay_t<T>>::value, void>;
326
327 template <class T>
328 using enable_xkeep_slice_t = std::enable_if_t<is_xkeep_slice<std::decay_t<T>>::value, void>;
329 }
330
331 template <class T>
332 class xkeep_slice : public xslice<xkeep_slice<T>>
333 {
334 public:
335
336 using container_type = svector<T>;
337 using size_type = typename container_type::value_type;
338 using self_type = xkeep_slice<T>;
339
340 template <class C, typename = detail::disable_xkeep_slice_t<C>>
341 explicit xkeep_slice(C& cont);
342 explicit xkeep_slice(container_type&& cont);
343
344 template <class S>
345 xkeep_slice(std::initializer_list<S> t);
346
347 template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, int>>
348 operator xkeep_slice<S>() const noexcept;
349
350 // Same as implicit conversion operator but more convenient to call
351 // from a variant visitor
352 template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, int>>
353 xkeep_slice<S> convert() const noexcept;
354
355 size_type operator()(size_type i) const noexcept;
356 size_type size() const noexcept;
357
358 void normalize(std::size_t s);
359
360 size_type step_size(std::size_t i, std::size_t n = 1) const noexcept;
361 size_type revert_index(std::size_t i) const;
362
363 bool contains(size_type i) const noexcept;
364
365 bool operator==(const self_type& rhs) const noexcept;
366 bool operator!=(const self_type& rhs) const noexcept;
367
368 private:
369
370 xkeep_slice() = default;
371
372 container_type m_indices;
373 container_type m_raw_indices;
374
375 template <class S>
376 friend class xkeep_slice;
377 };
378
379 namespace detail
380 {
381 template <class T>
382 using disable_integral_keep = std::enable_if_t<
383 !xtl::is_integral<std::decay_t<T>>::value,
384 xkeep_slice<typename std::decay_t<T>::value_type>>;
385
386 template <class T, class R>
387 using enable_integral_keep = std::enable_if_t<xtl::is_integral<T>::value, xkeep_slice<R>>;
388 }
389
404 template <class T>
405 inline detail::disable_integral_keep<T> keep(T&& indices)
406 {
407 return xkeep_slice<typename std::decay_t<T>::value_type>(std::forward<T>(indices));
408 }
409
410 template <class R = std::ptrdiff_t, class T>
411 inline detail::enable_integral_keep<T, R> keep(T i)
412 {
413 using slice_type = xkeep_slice<R>;
414 using container_type = typename slice_type::container_type;
415 container_type tmp = {static_cast<R>(i)};
416 return slice_type(std::move(tmp));
417 }
418
419 template <class R = std::ptrdiff_t, class Arg0, class Arg1, class... Args>
420 inline xkeep_slice<R> keep(Arg0 i0, Arg1 i1, Args... args)
421 {
422 using slice_type = xkeep_slice<R>;
423 using container_type = typename slice_type::container_type;
424 container_type tmp = {static_cast<R>(i0), static_cast<R>(i1), static_cast<R>(args)...};
425 return slice_type(std::move(tmp));
426 }
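// Usage sketch for the keep() overloads above, a minimal example that is not part of
// this header; it assumes xt::xarray and xt::view from the rest of the library.
//
//     xt::xarray<int> a = {10, 20, 30, 40, 50};
//     auto k1 = xt::view(a, xt::keep(0, 2, 4));   // {10, 30, 50}
//     std::vector<std::ptrdiff_t> idx = {1, 3};
//     auto k2 = xt::view(a, xt::keep(idx));       // {20, 40}
//     auto k3 = xt::view(a, xt::keep(-1));        // negative indices count from the end: {50}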
427
428 /***************************
429 * xdrop_slice declaration *
430 ***************************/
431
432 template <class T>
433 class xdrop_slice;
434
435 namespace detail
436 {
437 template <class T>
438 struct is_xdrop_slice : std::false_type
439 {
440 };
441
442 template <class T>
443 struct is_xdrop_slice<xdrop_slice<T>> : std::true_type
444 {
445 };
446
447 template <class T>
448 using disable_xdrop_slice_t = std::enable_if_t<!is_xdrop_slice<std::decay_t<T>>::value, void>;
449
450 template <class T>
451 using enable_xdrop_slice_t = std::enable_if_t<is_xdrop_slice<std::decay_t<T>>::value, void>;
452 }
453
454 template <class T>
455 class xdrop_slice : public xslice<xdrop_slice<T>>
456 {
457 public:
458
459 using container_type = svector<T>;
460 using size_type = typename container_type::value_type;
461 using self_type = xdrop_slice<T>;
462
463 template <class C, typename = detail::disable_xdrop_slice_t<C>>
464 explicit xdrop_slice(C& cont);
465 explicit xdrop_slice(container_type&& cont);
466
467 template <class S>
468 xdrop_slice(std::initializer_list<S> t);
469
470 template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, int>>
471 operator xdrop_slice<S>() const noexcept;
472
473 // Same as implicit conversion operator but more convenient to call
474 // from a variant visitor
475 template <class S, typename = std::enable_if_t<std::is_convertible<S, T>::value, int>>
476 xdrop_slice<S> convert() const noexcept;
477
478 size_type operator()(size_type i) const noexcept;
479 size_type size() const noexcept;
480
481 void normalize(std::size_t s);
482
483 size_type step_size(std::size_t i, std::size_t n = 1) const noexcept;
484 size_type revert_index(std::size_t i) const;
485
486 bool contains(size_type i) const noexcept;
487
488 bool operator==(const self_type& rhs) const noexcept;
489 bool operator!=(const self_type& rhs) const noexcept;
490
491 private:
492
493 xdrop_slice() = default;
494
495 container_type m_indices;
496 container_type m_raw_indices;
497 std::map<size_type, size_type> m_inc;
498 size_type m_size;
499
500 template <class S>
501 friend class xdrop_slice;
502 };
503
504 namespace detail
505 {
506 template <class T>
507 using disable_integral_drop = std::enable_if_t<
508 !xtl::is_integral<std::decay_t<T>>::value,
509 xdrop_slice<typename std::decay_t<T>::value_type>>;
510
511 template <class T, class R>
512 using enable_integral_drop = std::enable_if_t<xtl::is_integral<T>::value, xdrop_slice<R>>;
513 }
514
528 template <class T>
529 inline detail::disable_integral_drop<T> drop(T&& indices)
530 {
531 return xdrop_slice<typename std::decay_t<T>::value_type>(std::forward<T>(indices));
532 }
533
534 template <class R = std::ptrdiff_t, class T>
535 inline detail::enable_integral_drop<T, R> drop(T i)
536 {
537 using slice_type = xdrop_slice<R>;
538 using container_type = typename slice_type::container_type;
539 container_type tmp = {static_cast<R>(i)};
540 return slice_type(std::move(tmp));
541 }
542
543 template <class R = std::ptrdiff_t, class Arg0, class Arg1, class... Args>
544 inline xdrop_slice<R> drop(Arg0 i0, Arg1 i1, Args... args)
545 {
546 using slice_type = xdrop_slice<R>;
547 using container_type = typename slice_type::container_type;
548 container_type tmp = {static_cast<R>(i0), static_cast<R>(i1), static_cast<R>(args)...};
549 return slice_type(std::move(tmp));
550 }
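// Usage sketch for the drop() overloads above, mirroring keep(); not part of this
// header and assumes xt::xarray and xt::view from the rest of the library.
//
//     xt::xarray<int> a = {10, 20, 30, 40, 50};
//     auto d1 = xt::view(a, xt::drop(1, 3));   // {10, 30, 50}
//     auto d2 = xt::view(a, xt::drop(-1));     // drop the last element: {10, 20, 30, 40}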
551
552 /******************************
553 * xrange_adaptor declaration *
554 ******************************/
555
556 template <class A, class B = A, class C = A>
557 struct xrange_adaptor
558 {
559 xrange_adaptor(A start_val, B stop_val, C step)
560 : m_start(start_val)
561 , m_stop(stop_val)
562 , m_step(step)
563 {
564 }
565
566 template <class MI = A, class MA = B, class STEP = C>
567 inline std::enable_if_t<
568 xtl::is_integral<MI>::value && xtl::is_integral<MA>::value && xtl::is_integral<STEP>::value,
569 xstepped_range<std::ptrdiff_t>>
570 get(std::size_t size) const
571 {
572 return get_stepped_range(m_start, m_stop, m_step, size);
573 }
574
575 template <class MI = A, class MA = B, class STEP = C>
576 inline std::enable_if_t<
577 !xtl::is_integral<MI>::value && xtl::is_integral<MA>::value && xtl::is_integral<STEP>::value,
578 xstepped_range<std::ptrdiff_t>>
579 get(std::size_t size) const
580 {
581 return get_stepped_range(m_step > 0 ? 0 : static_cast<std::ptrdiff_t>(size) - 1, m_stop, m_step, size);
582 }
583
584 template <class MI = A, class MA = B, class STEP = C>
585 inline std::enable_if_t<
586 xtl::is_integral<MI>::value && !xtl::is_integral<MA>::value && xtl::is_integral<STEP>::value,
587 xstepped_range<std::ptrdiff_t>>
588 get(std::size_t size) const
589 {
590 auto sz = static_cast<std::ptrdiff_t>(size);
591 return get_stepped_range(m_start, m_step > 0 ? sz : -(sz + 1), m_step, size);
592 }
593
594 template <class MI = A, class MA = B, class STEP = C>
595 inline std::enable_if_t<
596 xtl::is_integral<MI>::value && xtl::is_integral<MA>::value && !xtl::is_integral<STEP>::value,
597 xrange<std::ptrdiff_t>>
598 get(std::size_t size) const
599 {
600 return xrange<std::ptrdiff_t>(normalize(m_start, size), normalize(m_stop, size));
601 }
602
603 template <class MI = A, class MA = B, class STEP = C>
604 inline std::enable_if_t<
605 !xtl::is_integral<MI>::value && !xtl::is_integral<MA>::value && xtl::is_integral<STEP>::value,
606 xstepped_range<std::ptrdiff_t>>
607 get(std::size_t size) const
608 {
609 std::ptrdiff_t start = m_step >= 0 ? 0 : static_cast<std::ptrdiff_t>(size) - 1;
610 std::ptrdiff_t stop = m_step >= 0 ? static_cast<std::ptrdiff_t>(size) : -1;
611 return xstepped_range<std::ptrdiff_t>(start, stop, m_step);
612 }
613
614 template <class MI = A, class MA = B, class STEP = C>
615 inline std::enable_if_t<
616 xtl::is_integral<MI>::value && !xtl::is_integral<MA>::value && !xtl::is_integral<STEP>::value,
617 xrange<std::ptrdiff_t>>
618 get(std::size_t size) const
619 {
620 return xrange<std::ptrdiff_t>(normalize(m_start, size), static_cast<std::ptrdiff_t>(size));
621 }
622
623 template <class MI = A, class MA = B, class STEP = C>
624 inline std::enable_if_t<
625 !xtl::is_integral<MI>::value && xtl::is_integral<MA>::value && !xtl::is_integral<STEP>::value,
626 xrange<std::ptrdiff_t>>
627 get(std::size_t size) const
628 {
629 return xrange<std::ptrdiff_t>(0, normalize(m_stop, size));
630 }
631
632 template <class MI = A, class MA = B, class STEP = C>
633 inline std::enable_if_t<
634 !xtl::is_integral<MI>::value && !xtl::is_integral<MA>::value && !xtl::is_integral<STEP>::value,
635 xall<std::ptrdiff_t>>
636 get(std::size_t size) const
637 {
638 return xall<std::ptrdiff_t>(static_cast<std::ptrdiff_t>(size));
639 }
640
641 A start() const
642 {
643 return m_start;
644 }
645
646 B stop() const
647 {
648 return m_stop;
649 }
650
651 C step() const
652 {
653 return m_step;
654 }
655
656 private:
657
658 static auto normalize(std::ptrdiff_t val, std::size_t ssize)
659 {
660 std::ptrdiff_t size = static_cast<std::ptrdiff_t>(ssize);
661 val = (val >= 0) ? val : val + size;
662 return (std::max)(std::ptrdiff_t(0), (std::min)(size, val));
663 }
664
665 static auto
666 get_stepped_range(std::ptrdiff_t start, std::ptrdiff_t stop, std::ptrdiff_t step, std::size_t ssize)
667 {
668 std::ptrdiff_t size = static_cast<std::ptrdiff_t>(ssize);
669 start = (start >= 0) ? start : start + size;
670 stop = (stop >= 0) ? stop : stop + size;
671
672 if (step > 0)
673 {
674 start = (std::max)(std::ptrdiff_t(0), (std::min)(size, start));
675 stop = (std::max)(std::ptrdiff_t(0), (std::min)(size, stop));
676 }
677 else
678 {
679 start = (std::max)(std::ptrdiff_t(-1), (std::min)(size - 1, start));
680 stop = (std::max)(std::ptrdiff_t(-1), (std::min)(size - 1, stop));
681 }
682
683 return xstepped_range<std::ptrdiff_t>(start, stop, step);
684 }
685
686 A m_start;
687 B m_stop;
688 C m_step;
689 };
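// Worked example of the adaptor above (illustrative comment, not part of this header).
// For range(-3, xnone()) applied to a dimension of size 10, the (integral start,
// placeholder stop, placeholder step) overload of get() runs: normalize(-3, 10) gives
// -3 + 10 = 7, clamped to [0, 10], so the result is xrange<std::ptrdiff_t>(7, 10),
// i.e. indices 7, 8, 9.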
690
691 /*******************************
692 * Placeholders and rangemaker *
693 *******************************/
694
695 namespace placeholders
696 {
697 // xtensor universal placeholder
698 struct xtuph
699 {
700 };
701
702 template <class... Args>
703 struct rangemaker
704 {
705 std::ptrdiff_t rng[3]; // = { 0, 0, 0 };
706 };
707
708 XTENSOR_CONSTEXPR xtuph get_tuph_or_val(std::ptrdiff_t /*val*/, std::true_type)
709 {
710 return xtuph();
711 }
712
713 XTENSOR_CONSTEXPR std::ptrdiff_t get_tuph_or_val(std::ptrdiff_t val, std::false_type)
714 {
715 return val;
716 }
717
718 template <class A, class B, class C>
719 struct rangemaker<A, B, C>
720 {
721 XTENSOR_CONSTEXPR operator xrange_adaptor<A, B, C>()
722 {
723 return xrange_adaptor<A, B, C>(
724 {get_tuph_or_val(rng[0], std::is_same<A, xtuph>()),
725 get_tuph_or_val(rng[1], std::is_same<B, xtuph>()),
726 get_tuph_or_val(rng[2], std::is_same<C, xtuph>())}
727 );
728 }
729
730 std::ptrdiff_t rng[3]; // = { 0, 0, 0 };
731 };
732
733 template <class A, class B>
734 struct rangemaker<A, B>
735 {
736 XTENSOR_CONSTEXPR operator xrange_adaptor<A, B, xt::placeholders::xtuph>()
737 {
738 return xrange_adaptor<A, B, xt::placeholders::xtuph>(
739 {get_tuph_or_val(rng[0], std::is_same<A, xtuph>()),
740 get_tuph_or_val(rng[1], std::is_same<B, xtuph>()),
741 xtuph()}
742 );
743 }
744
745 std::ptrdiff_t rng[3]; // = { 0, 0, 0 };
746 };
747
748 template <class... OA>
749 XTENSOR_CONSTEXPR auto operator|(const rangemaker<OA...>& rng, const std::ptrdiff_t& t)
750 {
751 auto nrng = rangemaker<OA..., std::ptrdiff_t>({rng.rng[0], rng.rng[1], rng.rng[2]});
752 nrng.rng[sizeof...(OA)] = t;
753 return nrng;
754 }
755
756 template <class... OA>
757 XTENSOR_CONSTEXPR auto operator|(const rangemaker<OA...>& rng, const xt::placeholders::xtuph& /*t*/)
758 {
759 auto nrng = rangemaker<OA..., xt::placeholders::xtuph>({rng.rng[0], rng.rng[1], rng.rng[2]});
760 return nrng;
761 }
762
763 XTENSOR_GLOBAL_CONSTEXPR xtuph _{};
764 XTENSOR_GLOBAL_CONSTEXPR rangemaker<> _r = rangemaker<>({0, 0, 0});
765 XTENSOR_GLOBAL_CONSTEXPR xall_tag _a{};
766 XTENSOR_GLOBAL_CONSTEXPR xnewaxis_tag _n{};
767 XTENSOR_GLOBAL_CONSTEXPR xellipsis_tag _e{};
768 }
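// Usage sketch for the placeholders, a minimal example that is not part of this header;
// it assumes xt::xarray and xt::view from the rest of the library. `_` stands for
// "not specified" and ends up as an xtuph inside the resulting xrange_adaptor.
//
//     using namespace xt::placeholders;  // _, _r, _a, _n, _e
//     xt::xarray<int> a = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
//     auto head = xt::view(a, xt::range(_, 4));      // {0, 1, 2, 3}
//     auto tail = xt::view(a, xt::range(6, _));      // {6, 7, 8, 9}
//     auto rev  = xt::view(a, xt::range(_, _, -1));  // {9, 8, ..., 0}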
769
770 inline auto xnone()
771 {
772 return placeholders::xtuph();
773 }
774
775 namespace detail
776 {
777 template <class T, class E = void>
778 struct cast_if_integer
779 {
780 using type = T;
781
782 type operator()(T t)
783 {
784 return t;
785 }
786 };
787
788 template <class T>
789 struct cast_if_integer<T, std::enable_if_t<xtl::is_integral<T>::value>>
790 {
791 using type = std::ptrdiff_t;
792
793 type operator()(T t)
794 {
795 return static_cast<type>(t);
796 }
797 };
798
799 template <class T>
800 using cast_if_integer_t = typename cast_if_integer<T>::type;
801 }
802
817 template <class A, class B>
818 inline auto range(A start_val, B stop_val)
819 {
820 return xrange_adaptor<detail::cast_if_integer_t<A>, detail::cast_if_integer_t<B>, placeholders::xtuph>(
821 detail::cast_if_integer<A>{}(start_val),
822 detail::cast_if_integer<B>{}(stop_val),
823 placeholders::xtuph()
824 );
825 }
826
838 template <class A, class B, class C>
839 inline auto range(A start_val, B stop_val, C step)
840 {
841 return xrange_adaptor<detail::cast_if_integer_t<A>, detail::cast_if_integer_t<B>, detail::cast_if_integer_t<C>>(
842 detail::cast_if_integer<A>{}(start_val),
843 detail::cast_if_integer<B>{}(stop_val),
844 detail::cast_if_integer<C>{}(step)
845 );
846 }
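// Usage sketch for range(), a minimal example that is not part of this header; it
// assumes xt::xarray and xt::view from the rest of the library.
//
//     xt::xarray<int> a = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
//     auto r1 = xt::view(a, xt::range(1, 5));      // {1, 2, 3, 4}, stop is excluded
//     auto r2 = xt::view(a, xt::range(0, 10, 3));  // {0, 3, 6, 9}
//     auto r3 = xt::view(a, xt::range(8, 2, -2));  // {8, 6, 4}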
847
848 /******************************************************
849 * homogeneous get_size for integral types and slices *
850 ******************************************************/
851
852 template <class S>
853 inline disable_xslice<S, std::size_t> get_size(const S&) noexcept
854 {
855 return 1;
856 }
857
858 template <class S>
859 inline auto get_size(const xslice<S>& slice) noexcept
860 {
861 return slice.derived_cast().size();
862 }
863
864 /*******************************************************
865 * homogeneous step_size for integral types and slices *
866 *******************************************************/
867
868 template <class S>
869 inline disable_xslice<S, std::size_t> step_size(const S&, std::size_t) noexcept
870 {
871 return 0;
872 }
873
874 template <class S>
875 inline disable_xslice<S, std::size_t> step_size(const S&, std::size_t, std::size_t) noexcept
876 {
877 return 0;
878 }
879
880 template <class S>
881 inline auto step_size(const xslice<S>& slice, std::size_t idx) noexcept
882 {
883 return slice.derived_cast().step_size(idx);
884 }
885
886 template <class S>
887 inline auto step_size(const xslice<S>& slice, std::size_t idx, std::size_t n) noexcept
888 {
889 return slice.derived_cast().step_size(idx, n);
890 }
891
892 /*********************************************
893 * homogeneous value for integral and slices *
894 *********************************************/
895
896 template <class S, class I>
897 inline disable_xslice<S, std::size_t> value(const S& s, I) noexcept
898 {
899 return static_cast<std::size_t>(s);
900 }
901
902 template <class S, class I>
903 inline auto value(const xslice<S>& slice, I i) noexcept
904 {
905 using ST = typename S::size_type;
906 return slice.derived_cast()(static_cast<ST>(i));
907 }
908
909 /****************************************
910 * homogeneous get_slice_implementation *
911 ****************************************/
912
913 namespace detail
914 {
915 template <class T>
916 struct slice_implementation_getter
917 {
918 template <class E, class SL>
919 inline decltype(auto) operator()(E& e, SL&& slice, std::size_t index) const
920 {
921 return get_slice(e, std::forward<SL>(slice), index, xtl::is_signed<std::decay_t<SL>>());
922 }
923
924 private:
925
926 template <class E, class SL>
927 inline decltype(auto) get_slice(E&, SL&& slice, std::size_t, std::false_type) const
928 {
929 return std::forward<SL>(slice);
930 }
931
932 template <class E, class SL>
933 inline decltype(auto) get_slice(E& e, SL&& slice, std::size_t index, std::true_type) const
934 {
935 using int_type = std::decay_t<SL>;
936 return slice < int_type(0) ? slice + static_cast<std::ptrdiff_t>(e.shape(index))
937 : std::ptrdiff_t(slice);
938 }
939 };
940
941 struct keep_drop_getter
942 {
943 template <class E, class SL>
944 inline decltype(auto) operator()(E& e, SL&& slice, std::size_t index) const
945 {
946 slice.normalize(e.shape()[index]);
947 return std::forward<SL>(slice);
948 }
949
950 template <class E, class SL>
951 inline auto operator()(E& e, const SL& slice, std::size_t index) const
952 {
953 return this->operator()(e, SL(slice), index);
954 }
955 };
956
957 template <class T>
958 struct slice_implementation_getter<xkeep_slice<T>> : keep_drop_getter
959 {
960 };
961
962 template <class T>
963 struct slice_implementation_getter<xdrop_slice<T>> : keep_drop_getter
964 {
965 };
966
967 template <>
968 struct slice_implementation_getter<xall_tag>
969 {
970 template <class E, class SL>
971 inline auto operator()(E& e, SL&&, std::size_t index) const
972 {
973 return xall<typename E::size_type>(e.shape()[index]);
974 }
975 };
976
977 template <>
978 struct slice_implementation_getter<xnewaxis_tag>
979 {
980 template <class E, class SL>
981 inline auto operator()(E&, SL&&, std::size_t) const
982 {
983 return xnewaxis<typename E::size_type>();
984 }
985 };
986
987 template <class A, class B, class C>
988 struct slice_implementation_getter<xrange_adaptor<A, B, C>>
989 {
990 template <class E, class SL>
991 inline auto operator()(E& e, SL&& adaptor, std::size_t index) const
992 {
993 return adaptor.get(e.shape()[index]);
994 }
995 };
996 }
997
998 template <class E, class SL>
999 inline auto get_slice_implementation(E& e, SL&& slice, std::size_t index)
1000 {
1001 detail::slice_implementation_getter<std::decay_t<SL>> getter;
1002 return getter(e, std::forward<SL>(slice), index);
1003 }
1004
1005 /******************************
1006 * homogeneous get_slice_type *
1007 ******************************/
1008
1009 namespace detail
1010 {
1011 template <class E, class SL>
1012 struct get_slice_type_impl
1013 {
1014 using type = SL;
1015 };
1016
1017 template <class E>
1018 struct get_slice_type_impl<E, xall_tag>
1019 {
1020 using type = xall<typename E::size_type>;
1021 };
1022
1023 template <class E>
1024 struct get_slice_type_impl<E, xnewaxis_tag>
1025 {
1026 using type = xnewaxis<typename E::size_type>;
1027 };
1028
1029 template <class E, class A, class B, class C>
1030 struct get_slice_type_impl<E, xrange_adaptor<A, B, C>>
1031 {
1032 using type = decltype(xrange_adaptor<A, B, C>(A(), B(), C()).get(0));
1033 };
1034 }
1035
1036 template <class E, class SL>
1037 using get_slice_type = typename detail::get_slice_type_impl<E, std::remove_reference_t<SL>>::type;
1038
1039 /*************************
1040 * xslice implementation *
1041 *************************/
1042
1043 template <class D>
1044 inline auto xslice<D>::derived_cast() noexcept -> derived_type&
1045 {
1046 return *static_cast<derived_type*>(this);
1047 }
1048
1049 template <class D>
1050 inline auto xslice<D>::derived_cast() const noexcept -> const derived_type&
1051 {
1052 return *static_cast<const derived_type*>(this);
1053 }
1054
1055 /*************************
1056 * xrange implementation *
1057 *************************/
1058
1059 template <class T>
1060 inline xrange<T>::xrange(size_type start_val, size_type stop_val) noexcept
1061 : m_start(start_val)
1062 , m_size(stop_val > start_val ? stop_val - start_val : 0)
1063 {
1064 }
1065
1066 template <class T>
1067 template <class S, typename>
1068 inline xrange<T>::operator xrange<S>() const noexcept
1069 {
1070 xrange<S> ret;
1071 ret.m_start = static_cast<S>(m_start);
1072 ret.m_size = static_cast<S>(m_size);
1073 return ret;
1074 }
1075
1076 template <class T>
1077 template <class S, typename>
1078 inline xrange<S> xrange<T>::convert() const noexcept
1079 {
1080 return xrange<S>(*this);
1081 }
1082
1083 template <class T>
1084 inline auto xrange<T>::operator()(size_type i) const noexcept -> size_type
1085 {
1086 return m_start + i;
1087 }
1088
1089 template <class T>
1090 inline auto xrange<T>::size() const noexcept -> size_type
1091 {
1092 return m_size;
1093 }
1094
1095 template <class T>
1096 inline auto xrange<T>::step_size() const noexcept -> size_type
1097 {
1098 return 1;
1099 }
1100
1101 template <class T>
1102 inline auto xrange<T>::step_size(std::size_t /*i*/, std::size_t n) const noexcept -> size_type
1103 {
1104 return static_cast<size_type>(n);
1105 }
1106
1107 template <class T>
1108 inline auto xrange<T>::revert_index(std::size_t i) const noexcept -> size_type
1109 {
1110 return i - m_start;
1111 }
1112
1113 template <class T>
1114 inline bool xrange<T>::contains(size_type i) const noexcept
1115 {
1116 return i >= m_start && i < m_start + m_size;
1117 }
1118
1119 template <class T>
1120 inline bool xrange<T>::operator==(const self_type& rhs) const noexcept
1121 {
1122 return (m_start == rhs.m_start) && (m_size == rhs.m_size);
1123 }
1124
1125 template <class T>
1126 inline bool xrange<T>::operator!=(const self_type& rhs) const noexcept
1127 {
1128 return !(*this == rhs);
1129 }
1130
1131 /********************************
1132 * xstepped_range implementation *
1133 ********************************/
1134
1135 template <class T>
1136 inline xstepped_range<T>::xstepped_range(size_type start_val, size_type stop_val, size_type step) noexcept
1137 : m_start(start_val)
1138 , m_size(size_type(0))
1139 , m_step(step)
1140 {
1141 size_type n = stop_val - start_val;
1142 m_size = n / step + (((n < 0) ^ (step > 0)) && (n % step));
1143 }
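// Worked example of the size computation above (illustrative comment, not part of
// this header):
//   xstepped_range(0, 7, 2):  n = 7,  7 / 2 = 3, the correction term is 1 since
//                             (n < 0) ^ (step > 0) is true and 7 % 2 != 0,
//                             so m_size = 4 -> indices 0, 2, 4, 6
//   xstepped_range(5, 0, -2): n = -5, -5 / -2 = 2, the correction term is 1 since
//                             -5 % -2 == -1, so m_size = 3 -> indices 5, 3, 1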
1144
1145 template <class T>
1146 template <class S, typename>
1147 inline xstepped_range<T>::operator xstepped_range<S>() const noexcept
1148 {
1149 xstepped_range<S> ret;
1150 ret.m_start = static_cast<S>(m_start);
1151 ret.m_size = static_cast<S>(m_size);
1152 ret.m_step = static_cast<S>(m_step);
1153 return ret;
1154 }
1155
1156 template <class T>
1157 template <class S, typename>
1158 inline xstepped_range<S> xstepped_range<T>::convert() const noexcept
1159 {
1160 return xstepped_range<S>(*this);
1161 }
1162
1163 template <class T>
1164 inline auto xstepped_range<T>::operator()(size_type i) const noexcept -> size_type
1165 {
1166 return m_start + i * m_step;
1167 }
1168
1169 template <class T>
1170 inline auto xstepped_range<T>::size() const noexcept -> size_type
1171 {
1172 return m_size;
1173 }
1174
1175 template <class T>
1176 inline auto xstepped_range<T>::step_size() const noexcept -> size_type
1177 {
1178 return m_step;
1179 }
1180
1181 template <class T>
1182 inline auto xstepped_range<T>::step_size(std::size_t /*i*/, std::size_t n) const noexcept -> size_type
1183 {
1184 return m_step * static_cast<size_type>(n);
1185 }
1186
1187 template <class T>
1188 inline auto xstepped_range<T>::revert_index(std::size_t i) const noexcept -> size_type
1189 {
1190 return (i - m_start) / m_step;
1191 }
1192
1193 template <class T>
1194 inline bool xstepped_range<T>::contains(size_type i) const noexcept
1195 {
1196 return i >= m_start && i < m_start + m_size * m_step && ((i - m_start) % m_step == 0);
1197 }
1198
1199 template <class T>
1200 inline bool xstepped_range<T>::operator==(const self_type& rhs) const noexcept
1201 {
1202 return (m_start == rhs.m_start) && (m_size == rhs.m_size) && (m_step == rhs.m_step);
1203 }
1204
1205 template <class T>
1206 inline bool xstepped_range<T>::operator!=(const self_type& rhs) const noexcept
1207 {
1208 return !(*this == rhs);
1209 }
1210
1211 /***********************
1212 * xall implementation *
1213 ***********************/
1214
1215 template <class T>
1216 inline xall<T>::xall(size_type size) noexcept
1217 : m_size(size)
1218 {
1219 }
1220
1221 template <class T>
1222 template <class S, typename>
1223 inline xall<T>::operator xall<S>() const noexcept
1224 {
1225 return xall<S>(static_cast<S>(m_size));
1226 }
1227
1228 template <class T>
1229 template <class S, typename>
1230 inline xall<S> xall<T>::convert() const noexcept
1231 {
1232 return xall<S>(*this);
1233 }
1234
1235 template <class T>
1236 inline auto xall<T>::operator()(size_type i) const noexcept -> size_type
1237 {
1238 return i;
1239 }
1240
1241 template <class T>
1242 inline auto xall<T>::size() const noexcept -> size_type
1243 {
1244 return m_size;
1245 }
1246
1247 template <class T>
1248 inline auto xall<T>::step_size() const noexcept -> size_type
1249 {
1250 return 1;
1251 }
1252
1253 template <class T>
1254 inline auto xall<T>::step_size(std::size_t /*i*/, std::size_t n) const noexcept -> size_type
1255 {
1256 return static_cast<size_type>(n);
1257 }
1258
1259 template <class T>
1260 inline auto xall<T>::revert_index(std::size_t i) const noexcept -> size_type
1261 {
1262 return i;
1263 }
1264
1265 template <class T>
1266 inline bool xall<T>::contains(size_type i) const noexcept
1267 {
1268 return i < m_size;
1269 }
1270
1271 template <class T>
1272 inline bool xall<T>::operator==(const self_type& rhs) const noexcept
1273 {
1274 return m_size == rhs.m_size;
1275 }
1276
1277 template <class T>
1278 inline bool xall<T>::operator!=(const self_type& rhs) const noexcept
1279 {
1280 return !(*this == rhs);
1281 }
1282
1283 /***************************
1284 * xnewaxis implementation *
1285 ***************************/
1286
1287 template <class T>
1288 template <class S, typename>
1289 inline xnewaxis<T>::operator xnewaxis<S>() const noexcept
1290 {
1291 return xnewaxis<S>();
1292 }
1293
1294 template <class T>
1295 template <class S, typename>
1296 inline xnewaxis<S> xnewaxis<T>::convert() const noexcept
1297 {
1298 return xnewaxis<S>(*this);
1299 }
1300
1301 template <class T>
1302 inline auto xnewaxis<T>::operator()(size_type) const noexcept -> size_type
1303 {
1304 return 0;
1305 }
1306
1307 template <class T>
1308 inline auto xnewaxis<T>::size() const noexcept -> size_type
1309 {
1310 return 1;
1311 }
1312
1313 template <class T>
1314 inline auto xnewaxis<T>::step_size() const noexcept -> size_type
1315 {
1316 return 0;
1317 }
1318
1319 template <class T>
1320 inline auto xnewaxis<T>::step_size(std::size_t /*i*/, std::size_t /*n*/) const noexcept -> size_type
1321 {
1322 return 0;
1323 }
1324
1325 template <class T>
1326 inline auto xnewaxis<T>::revert_index(std::size_t i) const noexcept -> size_type
1327 {
1328 return i;
1329 }
1330
1331 template <class T>
1332 inline bool xnewaxis<T>::contains(size_type i) const noexcept
1333 {
1334 return i == 0;
1335 }
1336
1337 template <class T>
1338 inline bool xnewaxis<T>::operator==(const self_type& /*rhs*/) const noexcept
1339 {
1340 return true;
1341 }
1342
1343 template <class T>
1344 inline bool xnewaxis<T>::operator!=(const self_type& /*rhs*/) const noexcept
1345 {
1346 return true;
1347 }
1348
1349 /******************************
1350 * xkeep_slice implementation *
1351 ******************************/
1352
1353 template <class T>
1354 template <class C, typename>
1355 inline xkeep_slice<T>::xkeep_slice(C& cont)
1356 : m_raw_indices(cont.begin(), cont.end())
1357 {
1358 }
1359
1360 template <class T>
1361 inline xkeep_slice<T>::xkeep_slice(container_type&& cont)
1362 : m_raw_indices(std::move(cont))
1363 {
1364 }
1365
1366 template <class T>
1367 template <class S>
1368 inline xkeep_slice<T>::xkeep_slice(std::initializer_list<S> t)
1369 : m_raw_indices(t.size())
1370 {
1371 std::transform(
1372 t.begin(),
1373 t.end(),
1374 m_raw_indices.begin(),
1375 [](auto t)
1376 {
1377 return static_cast<size_type>(t);
1378 }
1379 );
1380 }
1381
1382 template <class T>
1383 template <class S, typename>
1384 inline xkeep_slice<T>::operator xkeep_slice<S>() const noexcept
1385 {
1386 xkeep_slice<S> ret;
1387 using us_type = typename container_type::size_type;
1388 us_type sz = static_cast<us_type>(size());
1389 ret.m_raw_indices.resize(sz);
1390 ret.m_indices.resize(sz);
1391 std::transform(
1392 m_raw_indices.cbegin(),
1393 m_raw_indices.cend(),
1394 ret.m_raw_indices.begin(),
1395 [](const T& val)
1396 {
1397 return static_cast<S>(val);
1398 }
1399 );
1400 std::transform(
1401 m_indices.cbegin(),
1402 m_indices.cend(),
1403 ret.m_indices.begin(),
1404 [](const T& val)
1405 {
1406 return static_cast<S>(val);
1407 }
1408 );
1409 return ret;
1410 }
1411
1412 template <class T>
1413 template <class S, typename>
1414 inline xkeep_slice<S> xkeep_slice<T>::convert() const noexcept
1415 {
1416 return xkeep_slice<S>(*this);
1417 }
1418
1419 template <class T>
1420 inline void xkeep_slice<T>::normalize(std::size_t shape)
1421 {
1422 m_indices.resize(m_raw_indices.size());
1423 std::size_t sz = m_indices.size();
1424 for (std::size_t i = 0; i < sz; ++i)
1425 {
1426 m_indices[i] = m_raw_indices[i] < 0 ? static_cast<size_type>(shape) + m_raw_indices[i]
1427 : m_raw_indices[i];
1428 }
1429 }
1430
1431 template <class T>
1432 inline auto xkeep_slice<T>::operator()(size_type i) const noexcept -> size_type
1433 {
1434 return m_indices.size() == size_type(1) ? m_indices.front() : m_indices[static_cast<std::size_t>(i)];
1435 }
1436
1437 template <class T>
1438 inline auto xkeep_slice<T>::size() const noexcept -> size_type
1439 {
1440 return static_cast<size_type>(m_raw_indices.size());
1441 }
1442
1443 template <class T>
1444 inline auto xkeep_slice<T>::step_size(std::size_t i, std::size_t n) const noexcept -> size_type
1445 {
1446 if (m_indices.size() == 1)
1447 {
1448 return 0;
1449 }
1450 if (i + n >= m_indices.size())
1451 {
1452 return m_indices.back() - m_indices[i] + 1;
1453 }
1454 else
1455 {
1456 return m_indices[i + n] - m_indices[i];
1457 }
1458 }
1459
1460 template <class T>
1461 inline auto xkeep_slice<T>::revert_index(std::size_t i) const -> size_type
1462 {
1463 auto it = std::find(m_indices.begin(), m_indices.end(), i);
1464 if (it != m_indices.end())
1465 {
1466 return std::distance(m_indices.begin(), it);
1467 }
1468 else
1469 {
1470 XTENSOR_THROW(std::runtime_error, "Index i (" + std::to_string(i) + ") not in indices of slice.");
1471 }
1472 }
1473
1474 template <class T>
1475 inline bool xkeep_slice<T>::contains(size_type i) const noexcept
1476 {
1477 return (std::find(m_indices.begin(), m_indices.end(), i) == m_indices.end()) ? false : true;
1478 }
1479
1480 template <class T>
1481 inline bool xkeep_slice<T>::operator==(const self_type& rhs) const noexcept
1482 {
1483 return m_indices == rhs.m_indices;
1484 }
1485
1486 template <class T>
1487 inline bool xkeep_slice<T>::operator!=(const self_type& rhs) const noexcept
1488 {
1489 return !(*this == rhs);
1490 }
1491
1492 /******************************
1493 * xdrop_slice implementation *
1494 ******************************/
1495
1496 template <class T>
1497 template <class C, typename>
1498 inline xdrop_slice<T>::xdrop_slice(C& cont)
1499 : m_raw_indices(cont.begin(), cont.end())
1500 {
1501 }
1502
1503 template <class T>
1504 inline xdrop_slice<T>::xdrop_slice(container_type&& cont)
1505 : m_raw_indices(std::move(cont))
1506 {
1507 }
1508
1509 template <class T>
1510 template <class S>
1511 inline xdrop_slice<T>::xdrop_slice(std::initializer_list<S> t)
1512 : m_raw_indices(t.size())
1513 {
1514 std::transform(
1515 t.begin(),
1516 t.end(),
1517 m_raw_indices.begin(),
1518 [](auto t)
1519 {
1520 return static_cast<size_type>(t);
1521 }
1522 );
1523 }
1524
1525 template <class T>
1526 template <class S, typename>
1527 inline xdrop_slice<T>::operator xdrop_slice<S>() const noexcept
1528 {
1529 xdrop_slice<S> ret;
1530 ret.m_raw_indices.resize(m_raw_indices.size());
1531 ret.m_indices.resize(m_indices.size());
1532 std::transform(
1533 m_raw_indices.cbegin(),
1534 m_raw_indices.cend(),
1535 ret.m_raw_indices.begin(),
1536 [](const T& val)
1537 {
1538 return static_cast<S>(val);
1539 }
1540 );
1541 std::transform(
1542 m_indices.cbegin(),
1543 m_indices.cend(),
1544 ret.m_indices.begin(),
1545 [](const T& val)
1546 {
1547 return static_cast<S>(val);
1548 }
1549 );
1550 std::transform(
1551 m_inc.cbegin(),
1552 m_inc.cend(),
1553 std::inserter(ret.m_inc, ret.m_inc.begin()),
1554 [](const auto& val)
1555 {
1556 return std::make_pair(static_cast<S>(val.first), static_cast<S>(val.second));
1557 }
1558 );
1559 ret.m_size = static_cast<S>(m_size);
1560 return ret;
1561 }
1562
1563 template <class T>
1564 template <class S, typename>
1565 inline xdrop_slice<S> xdrop_slice<T>::convert() const noexcept
1566 {
1567 return xdrop_slice<S>(*this);
1568 }
1569
1570 template <class T>
1571 inline void xdrop_slice<T>::normalize(std::size_t shape)
1572 {
1573 m_size = static_cast<size_type>(shape - m_raw_indices.size());
1574
1575 m_indices.resize(m_raw_indices.size());
1576 std::size_t sz = m_indices.size();
1577 for (std::size_t i = 0; i < sz; ++i)
1578 {
1579 m_indices[i] = m_raw_indices[i] < 0 ? static_cast<size_type>(shape) + m_raw_indices[i]
1580 : m_raw_indices[i];
1581 }
1582 size_type cum = size_type(0);
1583 size_type prev_cum = cum;
1584 for (std::size_t i = 0; i < sz; ++i)
1585 {
1586 std::size_t ind = i;
1587 size_type d = m_indices[i];
1588 while (i + 1 < sz && m_indices[i + 1] == m_indices[i] + 1)
1589 {
1590 ++i;
1591 }
1592 cum += (static_cast<size_type>(i) - static_cast<size_type>(ind)) + 1;
1593 m_inc[d - prev_cum] = cum;
1594 prev_cum = cum;
1595 }
1596 }
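// Worked example of normalize() above (illustrative comment, not part of this header).
// For drop(2, 3) on a dimension of size 6: m_size = 4 and m_indices = {2, 3}. The
// single consecutive run {2, 3} contributes cum = 2, so m_inc = {{2, 2}};
// operator()(i) then returns i for i < 2 and i + 2 for i >= 2, mapping the kept
// positions 0, 1, 2, 3 to 0, 1, 4, 5.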
1597
1598 template <class T>
1599 inline auto xdrop_slice<T>::operator()(size_type i) const noexcept -> size_type
1600 {
1601 if (m_inc.empty() || i < m_inc.begin()->first)
1602 {
1603 return i;
1604 }
1605 else
1606 {
1607 auto iter = --m_inc.upper_bound(i);
1608 return i + iter->second;
1609 }
1610 }
1611
1612 template <class T>
1613 inline auto xdrop_slice<T>::size() const noexcept -> size_type
1614 {
1615 return m_size;
1616 }
1617
1618 template <class T>
1619 inline auto xdrop_slice<T>::step_size(std::size_t i, std::size_t n) const noexcept -> size_type
1620 {
1621 if (i + n >= static_cast<std::size_t>(m_size))
1622 {
1623 return (*this)(static_cast<size_type>(m_size - 1)) - (*this)(static_cast<size_type>(i)) + 1;
1624 }
1625 else
1626 {
1627 return (*this)(static_cast<size_type>(i + n)) - (*this)(static_cast<size_type>(i));
1628 }
1629 }
1630
1631 template <class T>
1632 inline auto xdrop_slice<T>::revert_index(std::size_t i) const -> size_type
1633 {
1634 if (i < m_inc.begin()->first)
1635 {
1636 return i;
1637 }
1638 else
1639 {
1640 auto iter = --m_inc.lower_bound(i);
1641 auto check = iter->first + iter->second;
1642 if (check > i)
1643 {
1644 --iter;
1645 }
1646 return i - iter->second;
1647 }
1648 }
1649
1650 template <class T>
1651 inline bool xdrop_slice<T>::contains(size_type i) const noexcept
1652 {
1653 return (std::find(m_indices.begin(), m_indices.end(), i) == m_indices.end()) ? true : false;
1654 }
1655
1656 template <class T>
1657 inline bool xdrop_slice<T>::operator==(const self_type& rhs) const noexcept
1658 {
1659 return m_indices == rhs.m_indices;
1660 }
1661
1662 template <class T>
1663 inline bool xdrop_slice<T>::operator!=(const self_type& rhs) const noexcept
1664 {
1665 return !(*this == rhs);
1666 }
1667}
1668
1669#undef XTENSOR_CONSTEXPR
1670
1671#endif
auto range(A start_val, B stop_val)
Select a range from start_val to stop_val (excluded).
Definition xslice.hpp:818
auto all() noexcept
Returns a slice representing a full dimension, to be used as an argument of view function.
Definition xslice.hpp:234
auto newaxis() noexcept
Returns a slice representing a new axis of length one, to be used as an argument of view function.
Definition xslice.hpp:300
auto ellipsis() noexcept
Returns a slice representing all remaining dimensions, and selecting all in these dimensions.
Definition xslice.hpp:255
detail::disable_integral_keep< T > keep(T &&indices)
Create a non-contiguous slice from a container of indices to keep.
Definition xslice.hpp:405
detail::disable_integral_drop< T > drop(T &&indices)
Create a non-contiguous slice from a container of indices to drop.
Definition xslice.hpp:529