Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,8 @@ add_library(neural-fortran
src/nf/nf_flatten_layer_submodule.f90
src/nf/nf_input1d_layer.f90
src/nf/nf_input1d_layer_submodule.f90
src/nf/nf_input2d_layer.f90
src/nf/nf_input2d_layer_submodule.f90
src/nf/nf_input3d_layer.f90
src/nf/nf_input3d_layer_submodule.f90
src/nf/nf_layer_constructors.f90
Expand Down
2 changes: 1 addition & 1 deletion LICENSE
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2018-2024 neural-fortran contributors
Copyright (c) 2018-2025 neural-fortran contributors

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ Read the paper [here](https://arxiv.org/abs/1902.06714).

| Layer type | Constructor name | Supported input layers | Rank of output array | Forward pass | Backward pass |
|------------|------------------|------------------------|----------------------|--------------|---------------|
| Input | `input` | n/a | 1, 3 | n/a | n/a |
| Input | `input` | n/a | 1, 2, 3 | n/a | n/a |
| Dense (fully-connected) | `dense` | `input1d`, `flatten` | 1 | ✅ | ✅ |
| Convolutional (2-d) | `conv2d` | `input3d`, `conv2d`, `maxpool2d`, `reshape` | 3 | ✅ | ✅(*) |
| Max-pooling (2-d) | `maxpool2d` | `input3d`, `conv2d`, `maxpool2d`, `reshape` | 3 | ✅ | ✅ |
Expand Down
6 changes: 3 additions & 3 deletions fpm.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
name = "neural-fortran"
version = "0.18.0"
version = "0.19.0"
license = "MIT"
author = "Milan Curcic"
maintainer = "[email protected]"
copyright = "Copyright 2018-2024, neural-fortran contributors"
maintainer = "[email protected]"
copyright = "Copyright 2018-2025, neural-fortran contributors"
48 changes: 48 additions & 0 deletions src/nf/nf_input2d_layer.f90
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
module nf_input2d_layer

  !! This module provides the `input2d_layer` type.
  !! An input layer stores user-provided data and exposes it unchanged
  !! as its output; it performs no computation of its own.

  use nf_base_layer, only: base_layer
  implicit none

  private
  public :: input2d_layer

  type, extends(base_layer) :: input2d_layer
    real, allocatable :: output(:,:)
      !! Output values; holds whatever was last passed to `set`
  contains
    procedure :: init
    procedure :: set
  end type input2d_layer

  interface input2d_layer
    pure module function input2d_layer_cons(output_shape) result(res)
      !! Create a new instance of the 2-d input layer.
      !! Only used internally by the `layer % init` method.
      integer, intent(in) :: output_shape(2)
        !! Shape of the input layer
      type(input2d_layer) :: res
        !! 2-d input layer instance
    end function input2d_layer_cons
  end interface input2d_layer

  interface

    module subroutine init(self, input_shape)
      !! Only here to satisfy the language rules
      !! about deferred methods of abstract types.
      !! This method does nothing for this type and should not be called.
      class(input2d_layer), intent(in out) :: self
        !! Layer instance
      integer, intent(in) :: input_shape(:)
        !! Ignored by this type
    end subroutine init

    pure module subroutine set(self, values)
      !! Set the values of this layer's output array.
      class(input2d_layer), intent(in out) :: self
        !! Layer instance
      real, intent(in) :: values(:,:)
        !! Values to set
    end subroutine set

  end interface

end module nf_input2d_layer
23 changes: 23 additions & 0 deletions src/nf/nf_input2d_layer_submodule.f90
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
submodule(nf_input2d_layer) nf_input2d_layer_submodule

  implicit none

contains

  pure module function input2d_layer_cons(output_shape) result(res)
    !! Construct a 2-d input layer with a zero-initialized output array
    !! of the requested shape.
    integer, intent(in) :: output_shape(2)
    type(input2d_layer) :: res
    ! Allocate and zero-fill in a single statement.
    allocate(res % output(output_shape(1), output_shape(2)), source=0.0)
  end function input2d_layer_cons

  module subroutine init(self, input_shape)
    !! Deliberate no-op; exists only to satisfy the deferred binding on
    !! the abstract base type. Input layers need no initialization here.
    class(input2d_layer), intent(in out) :: self
    integer, intent(in) :: input_shape(:)
  end subroutine init

  pure module subroutine set(self, values)
    !! Copy the user-provided values into this layer's output array.
    class(input2d_layer), intent(in out) :: self
    real, intent(in) :: values(:,:)
    self % output = values
  end subroutine set

end submodule nf_input2d_layer_submodule
27 changes: 25 additions & 2 deletions src/nf/nf_layer.f90
Original file line number Diff line number Diff line change
Expand Up @@ -35,12 +35,14 @@ module nf_layer

! Specific subroutines for different array ranks
procedure, private :: backward_1d
procedure, private :: backward_2d
procedure, private :: backward_3d
procedure, private :: get_output_1d
procedure, private :: get_output_2d
procedure, private :: get_output_3d

generic :: backward => backward_1d, backward_3d
generic :: get_output => get_output_1d, get_output_3d
generic :: backward => backward_1d, backward_2d, backward_3d
generic :: get_output => get_output_1d, get_output_2d, get_output_3d

end type layer

Expand All @@ -59,6 +61,19 @@ pure module subroutine backward_1d(self, previous, gradient)
!! Array of gradient values from the next layer
end subroutine backward_1d

pure module subroutine backward_2d(self, previous, gradient)
  !! Apply a backward pass on the layer for a rank-2 gradient.
  !! This changes the internal state of the layer.
  !! This is normally called internally by the `network % backward`
  !! method.
  class(layer), intent(in out) :: self
    !! Layer instance
  class(layer), intent(in) :: previous
    !! Previous layer instance
  real, intent(in) :: gradient(:, :)
    !! Array of gradient values from the next layer
end subroutine backward_2d

pure module subroutine backward_3d(self, previous, gradient)
!! Apply a backward pass on the layer.
!! This changes the internal state of the layer.
Expand Down Expand Up @@ -95,6 +110,14 @@ pure module subroutine get_output_1d(self, output)
!! Output values from this layer
end subroutine get_output_1d

pure module subroutine get_output_2d(self, output)
  !! Returns the output values (activations) from a layer with a 2-d
  !! output (e.g. input2d).
  class(layer), intent(in) :: self
    !! Layer instance
  real, allocatable, intent(out) :: output(:,:)
    !! Output values from this layer
end subroutine get_output_2d

pure module subroutine get_output_3d(self, output)
!! Returns the output values (activations) from a layer with a 3-d output
!! (e.g. input3d, conv2d)
Expand Down
31 changes: 25 additions & 6 deletions src/nf/nf_layer_constructors.f90
Original file line number Diff line number Diff line change
Expand Up @@ -35,10 +35,10 @@ module function input1d(layer_size) result(res)
!! Resulting layer instance
end function input1d

module function input3d(layer_shape) result(res)
!! 3-d input layer constructor.
module function input2d(dim1, dim2) result(res)
!! 2-d input layer constructor.
!!
!! This layer is for inputting 3-d data to the network.
!! This layer is for inputting 2-d data to the network.
!! Currently, this layer must be followed by a conv2d layer.
!! An input layer must be the first layer in the network.
!!
Expand All @@ -50,10 +50,29 @@ module function input3d(layer_shape) result(res)
!! ```
!! use nf, only :: input, layer
!! type(layer) :: input_layer
!! input_layer = input([28, 28, 1])
!! input_layer = input(28, 28)
!! ```
integer, intent(in) :: dim1, dim2
!! First and second dimension sizes
type(layer) :: res
!! Resulting layer instance
end function input2d

module function input3d(dim1, dim2, dim3) result(res)
  !! 3-d input layer constructor.
  !!
  !! This is a specific function that is available
  !! under a generic name `input`.
  !!
  !! Example:
  !!
  !! ```
  !! use nf, only: input, layer
  !! type(layer) :: input_layer
  !! input_layer = input(28, 28, 1)
  !! ```
  integer, intent(in) :: dim1, dim2, dim3
    !! First, second and third dimension sizes
  type(layer) :: res
    !! Resulting layer instance
end function input3d
Expand Down
22 changes: 18 additions & 4 deletions src/nf/nf_layer_constructors_submodule.f90
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
use nf_dense_layer, only: dense_layer
use nf_flatten_layer, only: flatten_layer
use nf_input1d_layer, only: input1d_layer
use nf_input2d_layer, only: input2d_layer
use nf_input3d_layer, only: input3d_layer
use nf_maxpool2d_layer, only: maxpool2d_layer
use nf_reshape_layer, only: reshape3d_layer
Expand Down Expand Up @@ -81,16 +82,28 @@ module function input1d(layer_size) result(res)
end function input1d


module function input3d(layer_shape) result(res)
integer, intent(in) :: layer_shape(3)
module function input2d(dim1, dim2) result(res)
  !! 2-d input layer constructor; see the interface in the
  !! nf_layer_constructors module for user-facing documentation.
  integer, intent(in) :: dim1, dim2
  type(layer) :: res
  res % name = 'input'
  res % layer_shape = [dim1, dim2]
  ! An input layer has no layer upstream of it.
  res % input_layer_shape = [integer ::]
  allocate(res % p, source=input2d_layer([dim1, dim2]))
  ! Input layers are fully initialized at construction time.
  res % initialized = .true.
end function input2d


module function input3d(dim1, dim2, dim3) result(res)
  !! 3-d input layer constructor; see the interface in the
  !! nf_layer_constructors module for user-facing documentation.
  integer, intent(in) :: dim1, dim2, dim3
  type(layer) :: res
  res % name = 'input'
  res % layer_shape = [dim1, dim2, dim3]
  ! An input layer has no layer upstream of it.
  res % input_layer_shape = [integer ::]
  allocate(res % p, source=input3d_layer([dim1, dim2, dim3]))
  ! Input layers are fully initialized at construction time.
  res % initialized = .true.
end function input3d


module function maxpool2d(pool_size, stride) result(res)
integer, intent(in) :: pool_size
integer, intent(in), optional :: stride
Expand Down Expand Up @@ -119,6 +132,7 @@ module function maxpool2d(pool_size, stride) result(res)

end function maxpool2d


module function reshape(output_shape) result(res)
integer, intent(in) :: output_shape(:)
type(layer) :: res
Expand Down
41 changes: 41 additions & 0 deletions src/nf/nf_layer_submodule.f90
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
use nf_dense_layer, only: dense_layer
use nf_flatten_layer, only: flatten_layer
use nf_input1d_layer, only: input1d_layer
use nf_input2d_layer, only: input2d_layer
use nf_input3d_layer, only: input3d_layer
use nf_maxpool2d_layer, only: maxpool2d_layer
use nf_reshape_layer, only: reshape3d_layer
Expand Down Expand Up @@ -51,6 +52,18 @@ pure module subroutine backward_1d(self, previous, gradient)
end subroutine backward_1d


pure module subroutine backward_2d(self, previous, gradient)
  !! Backward pass for layers that receive a rank-2 gradient.
  implicit none
  class(layer), intent(in out) :: self
  class(layer), intent(in) :: previous
  real, intent(in) :: gradient(:,:)

  ! Intentional no-op: no layer types currently produce a 2-d output
  ! that participates in the backward pass. Implementations are
  ! expected to land with pull requests #197 and #199.
end subroutine backward_2d


pure module subroutine backward_3d(self, previous, gradient)
implicit none
class(layer), intent(in out) :: self
Expand Down Expand Up @@ -205,6 +218,23 @@ pure module subroutine get_output_1d(self, output)
end subroutine get_output_1d


pure module subroutine get_output_2d(self, output)
  !! Return the output values (activations) from a layer with a 2-d
  !! output (currently only input2d).
  implicit none
  class(layer), intent(in) :: self
    !! Layer instance
  real, allocatable, intent(out) :: output(:,:)
    !! Output values from this layer

  select type(this_layer => self % p)

    type is(input2d_layer)
      allocate(output, source=this_layer % output)
    class default
      ! Bug fix: the message previously said "1-d output ... input1d,
      ! dense, or flatten layer", copy-pasted from get_output_1d.
      error stop '2-d output can only be read from an input2d layer.'

  end select

end subroutine get_output_2d


pure module subroutine get_output_3d(self, output)
implicit none
class(layer), intent(in) :: self
Expand Down Expand Up @@ -280,6 +310,8 @@ elemental module function get_num_params(self) result(num_params)
select type (this_layer => self % p)
type is (input1d_layer)
num_params = 0
type is (input2d_layer)
num_params = 0
type is (input3d_layer)
num_params = 0
type is (dense_layer)
Expand All @@ -305,6 +337,8 @@ module function get_params(self) result(params)
select type (this_layer => self % p)
type is (input1d_layer)
! No parameters to get.
type is (input2d_layer)
! No parameters to get.
type is (input3d_layer)
! No parameters to get.
type is (dense_layer)
Expand All @@ -330,6 +364,8 @@ module function get_gradients(self) result(gradients)
select type (this_layer => self % p)
type is (input1d_layer)
! No gradients to get.
type is (input2d_layer)
! No gradients to get.
type is (input3d_layer)
! No gradients to get.
type is (dense_layer)
Expand Down Expand Up @@ -373,6 +409,11 @@ module subroutine set_params(self, params)
write(stderr, '(a)') 'Warning: calling set_params() ' &
// 'on a zero-parameter layer; nothing to do.'

type is (input2d_layer)
! No parameters to set.
write(stderr, '(a)') 'Warning: calling set_params() ' &
// 'on a zero-parameter layer; nothing to do.'

type is (input3d_layer)
! No parameters to set.
write(stderr, '(a)') 'Warning: calling set_params() ' &
Expand Down
Loading