@@ -3,7 +3,7 @@ extern crate libc;
 use array::Array;
 use defines::AfError;
 use seq::Seq;
-use self::libc::{c_int, c_uint, c_longlong};
+use self::libc::{c_double, c_int, c_uint};
 
 type MutAfIndex = *mut self::libc::c_longlong;
 type MutAfArray = *mut self::libc::c_longlong;
@@ -15,12 +15,12 @@ type IndexT = self::libc::c_longlong;
 extern {
     fn af_create_indexers(indexers: MutAfIndex) -> c_int;
     fn af_set_array_indexer(indexer: MutAfIndex, idx: AfArray, dim: DimT) -> c_int;
-    fn af_set_seq_indexer(indexer: MutAfIndex, idx: *const Seq, dim: DimT, is_batch: c_int) -> c_int;
+    fn af_set_seq_indexer(indexer: MutAfIndex, idx: *const SeqInternal, dim: DimT, is_batch: c_int) -> c_int;
     fn af_release_indexers(indexers: MutAfIndex) -> c_int;
 
-    fn af_index(out: MutAfArray, input: AfArray, ndims: c_uint, index: *const Seq) -> c_int;
+    fn af_index(out: MutAfArray, input: AfArray, ndims: c_uint, index: *const SeqInternal) -> c_int;
     fn af_lookup(out: MutAfArray, arr: AfArray, indices: AfArray, dim: c_uint) -> c_int;
-    fn af_assign_seq(out: MutAfArray, lhs: AfArray, ndims: c_uint, indices: *const Seq, rhs: AfArray) -> c_int;
+    fn af_assign_seq(out: MutAfArray, lhs: AfArray, ndims: c_uint, indices: *const SeqInternal, rhs: AfArray) -> c_int;
     fn af_index_gen(out: MutAfArray, input: AfArray, ndims: DimT, indices: *const IndexT) -> c_int;
     fn af_assign_gen(out: MutAfArray, lhs: AfArray, ndims: DimT, indices: *const IndexT, rhs: AfArray) -> c_int;
 }
@@ -62,11 +62,12 @@ impl Indexable for Array {
 ///
 /// This is used in functions [index_gen](./fn.index_gen.html) and
 /// [assign_gen](./fn.assign_gen.html)
-impl Indexable for Seq {
+impl<T: Copy> Indexable for Seq<T> where c_double: From<T> {
     fn set(&self, idxr: &Indexer, dim: u32, is_batch: Option<bool>) -> Result<(), AfError> {
         unsafe {
-            let err_val = af_set_seq_indexer(idxr.clone().get() as MutAfIndex, self as *const Seq,
-                                             dim as DimT, is_batch.unwrap() as c_int);
+            let err_val = af_set_seq_indexer(idxr.clone().get() as MutAfIndex,
+                                             &SeqInternal::from_seq(self) as *const SeqInternal,
+                                             dim as DimT, is_batch.unwrap() as c_int);
             match err_val {
                 0 => Ok(()),
                 _ => Err(AfError::from(err_val)),
@@ -130,12 +131,16 @@ impl Drop for Indexer {
 /// println!("a(seq(1, 3, 1), span)");
 /// print(&sub);
 /// ```
-pub fn index(input: &Array, seqs: &[Seq]) -> Result<Array, AfError> {
+pub fn index<T: Copy>(input: &Array, seqs: &[Seq<T>]) -> Result<Array, AfError>
+    where c_double: From<T>
+{
     unsafe {
         let mut temp: i64 = 0;
+        // TODO: allocating a whole new array on the heap just for this is BAD
+        let seqs: Vec<SeqInternal> = seqs.iter().map(|s| SeqInternal::from_seq(s)).collect();
         let err_val = af_index(&mut temp as MutAfArray
                                , input.get() as AfArray, seqs.len() as u32
-                               , seqs.as_ptr() as *const Seq);
+                               , seqs.as_ptr() as *const SeqInternal);
         match err_val {
             0 => Ok(Array::from(temp)),
             _ => Err(AfError::from(err_val)),
@@ -155,8 +160,8 @@ pub fn index(input: &Array, seqs: &[Seq]) -> Result<Array, AfError> {
 /// ```
 #[allow(dead_code)]
 pub fn row(input: &Array, row_num: u64) -> Result<Array, AfError> {
-    index(input, &[Seq::new(row_num as f64, row_num as f64, 1.0)
-                   , Seq::default()])
+    index(input, &[Seq::new(row_num as f64, row_num as f64, 1.0),
+                   Seq::default()])
 }
 
 #[allow(dead_code)]
@@ -300,11 +305,15 @@ pub fn lookup(input: &Array, indices: &Array, seq_dim: i32) -> Result<Array, AfError> {
 /// // 1.0 1.0 1.0
 /// // 2.0 2.0 2.0
 /// ```
-pub fn assign_seq(lhs: &Array, seqs: &[Seq], rhs: &Array) -> Result<Array, AfError> {
+pub fn assign_seq<T: Copy>(lhs: &Array, seqs: &[Seq<T>], rhs: &Array) -> Result<Array, AfError>
+    where c_double: From<T>
+{
     unsafe {
         let mut temp: i64 = 0;
+        // TODO: allocating a whole new array on the heap just for this is BAD
+        let seqs: Vec<SeqInternal> = seqs.iter().map(|s| SeqInternal::from_seq(s)).collect();
         let err_val = af_assign_seq(&mut temp as MutAfArray, lhs.get() as AfArray,
-                                    seqs.len() as c_uint, seqs.as_ptr() as *const Seq,
+                                    seqs.len() as c_uint, seqs.as_ptr() as *const SeqInternal,
                                     rhs.get() as AfArray);
         match err_val {
             0 => Ok(Array::from(temp)),
@@ -402,3 +411,20 @@ pub fn assign_gen(lhs: &Array, indices: &Indexer, rhs: &Array) -> Result<Array, AfError> {
         }
     }
 }
+
+#[repr(C)]
+struct SeqInternal {
+    begin: c_double,
+    end: c_double,
+    step: c_double,
+}
+
+impl SeqInternal {
+    fn from_seq<T: Copy>(s: &Seq<T>) -> Self where c_double: From<T> {
+        SeqInternal {
+            begin: From::from(s.begin()),
+            end: From::from(s.end()),
+            step: From::from(s.step()),
+        }
+    }
+}
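
A minimal usage sketch of the generic API after this change (hypothetical caller code, not part of the commit; `a` stands for any existing `Array`, and the `Seq` element type is inferred as `f64`, which trivially satisfies the `c_double: From<T>` bound):

    // Hypothetical example: take rows 1..=3 across all columns, then assign them back.
    // Each Seq<f64> is converted to a #[repr(C)] SeqInternal just before the FFI call,
    // so the slice handed to af_index/af_assign_seq has the layout ArrayFire expects.
    let seqs = [Seq::new(1.0f64, 3.0, 1.0), Seq::default()];
    let sub = index(&a, &seqs).unwrap();
    let updated = assign_seq(&a, &seqs, &sub).unwrap();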