@@ -133,10 +133,8 @@ public static void load_weights_from_hdf5_group(long f, List<ILayer> layers)
                 long g = H5G.open(f, name);
                 var weight_names = load_attributes_from_hdf5_group(g, "weight_names");
                 foreach (var i_ in weight_names)
-                {
-                    var vm = Regex.Replace(i_, "/", "$");
-                    vm = i_.Split('/')[0] + "/$" + vm.Substring(i_.Split('/')[0].Length + 1, i_.Length - i_.Split('/')[0].Length - 1);
-                    (success, Array result) = Hdf5.ReadDataset<float>(g, vm);
+                {
+                    (success, Array result) = Hdf5.ReadDataset<float>(g, i_);
                     if (success)
                         weight_values.Add(np.array(result));
                 }
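
For illustration: the removed lines rewrote a weight name such as "dense/kernel:0" by replacing its inner slashes with '$' before reading, while the new code hands the slash-separated name to Hdf5.ReadDataset unchanged, so HDF5 resolves it as a nested path under the layer group. The following is a minimal sketch of that read loop, assuming an already-open group handle g, the same using directives as the surrounding file, and a hypothetical wrapper name ReadLayerWeights; it is a sketch, not code from this commit.

```csharp
// Illustrative sketch only, not from the repository: assumes the same using
// directives as the surrounding file (System, System.Collections.Generic,
// the HDF5 and NumPy helpers) and an already-open HDF5 layer group handle g.
static List<NDArray> ReadLayerWeights(long g, IEnumerable<string> weight_names)
{
    var weight_values = new List<NDArray>();
    foreach (var i_ in weight_names)
    {
        // A name like "dense/kernel:0" is resolved by HDF5 as a nested path
        // under g, so the old '$' substitution is no longer needed.
        (bool success, Array result) = Hdf5.ReadDataset<float>(g, i_);
        if (success)
            weight_values.Add(np.array(result));
    }
    return weight_values;
}
```
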
@@ -196,9 +194,13 @@ public static void save_weights_to_hdf5_group(long f, List<ILayer> layers)
                     var tensor = val.AsTensor();
                     if (name.IndexOf("/") > 1)
                     {
-                        var crDataGroup = Hdf5.CreateOrOpenGroup(g, Hdf5Utils.NormalizedName(name.Split('/')[0]));
-                        var _name = Regex.Replace(name.Substring(name.Split('/')[0].Length, name.Length - name.Split('/')[0].Length), "/", "$");
-                        WriteDataset(crDataGroup, _name, tensor);
+                        var crDataGroup = g;
+                        string[] name_split = name.Split('/');
+                        for (int i = 0; i < name_split.Length - 1; i++)
+                        {
+                            crDataGroup = Hdf5.CreateOrOpenGroup(crDataGroup, Hdf5Utils.NormalizedName(name_split[i]));
+                        }
+                        WriteDataset(crDataGroup, name_split[name_split.Length - 1], tensor);
                         Hdf5.CloseGroup(crDataGroup);
                     }
                     else
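
On the write side, the new loop replaces the single flattened group plus '$'-joined dataset name: every intermediate segment of the weight name is opened or created as a nested HDF5 group, and the final segment becomes the dataset. Below is a small sketch of that traversal, assuming an open parent group g, the Hdf5/Hdf5Utils/WriteDataset helpers already used above, and a name containing at least one slash (as guarded by the surrounding if); the helper name WriteNestedWeight is hypothetical.

```csharp
// Illustrative sketch only, not from the repository. For name = "dense/dense_1/kernel:0"
// this opens or creates groups "dense" and "dense_1" under g, then writes the
// dataset "kernel:0" inside the deepest group.
static void WriteNestedWeight(long g, string name, Tensor tensor)
{
    var crDataGroup = g;
    string[] name_split = name.Split('/');

    // Every segment except the last is a group along the path.
    for (int i = 0; i < name_split.Length - 1; i++)
        crDataGroup = Hdf5.CreateOrOpenGroup(crDataGroup, Hdf5Utils.NormalizedName(name_split[i]));

    // The last segment is the dataset name itself.
    WriteDataset(crDataGroup, name_split[name_split.Length - 1], tensor);

    // Assumes name contains at least one '/', so crDataGroup is a child group
    // here rather than the parent handle g.
    Hdf5.CloseGroup(crDataGroup);
}
```

This nested layout is the same one the updated read loop in the first hunk expects, so saving and loading stay consistent, and it matches how Keras-style HDF5 files lay out slash-separated weight names as nested groups.
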