
Commit 9a40117

Standard conformance fixes (#190)
* Make various functions impure to allow returning polymorphic allocatables
* Update tested compilers
* More on tested compilers
* Update CMake definitions to make serial default and add PARALLEL macro
1 parent 3872202 commit 9a40117

16 files changed (+84, -73 lines)
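The "make various functions impure" items reflect a standard-conformance point: the commit title and the diffs below suggest these constructors run afoul of the Fortran 2018 restriction that a pure function result shall not be a polymorphic allocatable, nor have a polymorphic allocatable ultimate component. A minimal, hypothetical sketch of that pattern (the names here are invented and are not taken from the neural-fortran sources):

```fortran
module layer_demo
  implicit none

  type :: base_layer
  end type base_layer

  type :: layer
    ! A polymorphic allocatable component; a pure function result may not
    ! have such an ultimate component, which is why the constructors in
    ! this commit are declared without the pure/elemental prefix.
    class(base_layer), allocatable :: p
  end type layer

contains

  ! Plain (impure) constructor, mirroring the `pure module function` ->
  ! `module function` changes below.
  function make_layer() result(res)
    type(layer) :: res
    allocate(res % p, source=base_layer())
  end function make_layer

end module layer_demo
```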

README.md

Lines changed: 8 additions & 7 deletions
````diff
@@ -63,9 +63,10 @@ Optional dependencies are:
 
 Compilers tested include:
 
-* gfortran-9.4.0
-* ifort-2021.4
-* ifx-2021.4
+* flang-new 20.0.0
+* gfortran 13.2.0, 14.0.1
+* ifort 2021.13.1
+* ifx 2024.2.1
 
 ### Building with fpm
 
@@ -85,7 +86,7 @@ Once installed, use the compiler wrappers `caf` and `cafrun` to build and execut
 in parallel, respectively:
 
 ```
-fpm build --compiler caf --profile release
+fpm build --compiler caf --profile release --flag "-cpp -DPARALLEL"
 ```
 
 #### Testing with fpm
@@ -107,7 +108,7 @@ See the [Fortran Package Manager](https://github.com/fortran-lang/fpm) for more
 ```
 mkdir build
 cd build
-cmake .. -DSERIAL=1
+cmake ..
 make
 ```
 
@@ -122,7 +123,7 @@ in parallel, respectively:
 
 
 ```
-FC=caf cmake ..
+FC=caf cmake .. -DPARALLEL
 make
 cafrun -n 4 bin/mnist # run MNIST example on 4 cores
 ```
@@ -139,7 +140,7 @@ FC=ifort cmake ..
 for a parallel build of neural-fortran, or
 
 ```
-FC=ifort cmake .. -DSERIAL=1
+FC=ifort cmake ..
 ```
 
 for a serial build.
````

cmake/compilers.cmake

Lines changed: 20 additions & 18 deletions
```diff
@@ -1,11 +1,12 @@
 # compiler flags for gfortran
 if(CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
 
-  if(SERIAL)
-    message(STATUS "Configuring to build with -fcoarray=single")
-    add_compile_options("$<$<COMPILE_LANGUAGE:Fortran>:-fcoarray=single>")
+  if(PARALLEL)
+    message(STATUS "Configuring to build with -fcoarray=shared")
+    add_compile_options("$<$<COMPILE_LANGUAGE:Fortran>:-fcoarray=shared>")
+    add_compile_definitions(PARALLEL)
   else()
-    add_compile_options("$<$<COMPILE_LANGUAGE:Fortran>:-fcoarray=lib>")
+    add_compile_options("$<$<COMPILE_LANGUAGE:Fortran>:-fcoarray=single>")
   endif()
 
   if(BLAS)
@@ -14,21 +15,22 @@ if(CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
     message(STATUS "Configuring build to use BLAS from ${BLAS}")
   endif()
 
-  add_compile_options("$<$<AND:$<COMPILE_LANGUAGE:Fortran>,$<CONFIG:Debug>>:-fcheck=bounds;-fbacktrace>")
-  add_compile_options("$<$<AND:$<COMPILE_LANGUAGE:Fortran>,$<CONFIG:Release>>:-Ofast;-fno-frontend-optimize;-fno-backtrace>")
+  add_compile_options("$<$<AND:$<COMPILE_LANGUAGE:Fortran>,$<CONFIG:Debug>>:-cpp;-fcheck=bounds;-fbacktrace>")
+  add_compile_options("$<$<AND:$<COMPILE_LANGUAGE:Fortran>,$<CONFIG:Release>>:-cpp;-Ofast;-fno-frontend-optimize;-fno-backtrace>")
 
 elseif(CMAKE_Fortran_COMPILER_ID MATCHES "^Intel")
   # compiler flags for ifort
 
-  if(SERIAL)
-    message(STATUS "Configuring to build with -coarray=single")
+  if(PARALLEL)
+    message(STATUS "Configuring to build with -coarray=shared")
     if(WIN32)
-      add_compile_options("$<$<COMPILE_LANGUAGE:Fortran>:/Qcoarray:single>")
-      add_link_options("$<$<COMPILE_LANGUAGE:Fortran>:/Qcoarray:single>")
+      add_compile_options("$<$<COMPILE_LANGUAGE:Fortran>:/Qcoarray:shared>")
+      add_link_options("$<$<COMPILE_LANGUAGE:Fortran>:/Qcoarray:shared>")
     else()
-      add_compile_options("$<$<COMPILE_LANGUAGE:Fortran>:-coarray=single>")
-      add_link_options("$<$<COMPILE_LANGUAGE:Fortran>:-coarray=single>")
+      add_compile_options("$<$<COMPILE_LANGUAGE:Fortran>:-coarray=shared>")
+      add_link_options("$<$<COMPILE_LANGUAGE:Fortran>:-coarray=shared>")
     endif()
+    add_compile_definitions(PARALLEL)
   else()
     if(WIN32)
       add_compile_options("$<$<COMPILE_LANGUAGE:Fortran>:/Qcoarray:shared>")
@@ -40,16 +42,16 @@ elseif(CMAKE_Fortran_COMPILER_ID MATCHES "^Intel")
   endif()
 
   if(WIN32)
-    string(APPEND CMAKE_Fortran_FLAGS " /assume:byterecl")
+    string(APPEND CMAKE_Fortran_FLAGS " /assume:byterecl /fpp")
   else()
-    string(APPEND CMAKE_Fortran_FLAGS " -assume byterecl")
+    string(APPEND CMAKE_Fortran_FLAGS " -assume byterecl -fpp")
   endif()
-  add_compile_options("$<$<AND:$<COMPILE_LANGUAGE:Fortran>,$<CONFIG:Debug>>:-check;-traceback>")
-  # add_compile_options("$<$<AND:$<COMPILE_LANGUAGE:Fortran>,$<CONFIG:Release>>:-O3>")
+  add_compile_options("$<$<AND:$<COMPILE_LANGUAGE:Fortran>,$<CONFIG:Debug>>:-fpp;-check;-traceback>")
+  add_compile_options("$<$<AND:$<COMPILE_LANGUAGE:Fortran>,$<CONFIG:Release>>:-fpp;-O3>")
 
 elseif(CMAKE_Fortran_COMPILER_ID STREQUAL "Cray")
   # compiler flags for Cray ftn
   string(APPEND CMAKE_Fortran_FLAGS " -h noomp")
-  add_compile_options("$<$<AND:$<COMPILE_LANGUAGE:Fortran>,$<CONFIG:Debug>>:-O0;-g>")
-  add_compile_options("$<$<AND:$<COMPILE_LANGUAGE:Fortran>,$<CONFIG:Release>>:-O3>")
+  add_compile_options("$<$<AND:$<COMPILE_LANGUAGE:Fortran>,$<CONFIG:Debug>>:-e Z;-O0;-g>")
+  add_compile_options("$<$<AND:$<COMPILE_LANGUAGE:Fortran>,$<CONFIG:Release>>:-e Z;-O3>")
 endif()
```

cmake/options.cmake

Lines changed: 4 additions & 4 deletions
```diff
@@ -1,4 +1,4 @@
-option(SERIAL "Serial execution")
+option(PARALLEL "Parallel execution")
 option(${PROJECT_NAME}_BUILD_TESTING "build ${PROJECT_NAME} tests" true)
 option(${PROJECT_NAME}_BUILD_EXAMPLES "build ${PROJECT_NAME} examples" true)
 
@@ -8,10 +8,10 @@ set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
 set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
 set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin)
 
-if(SERIAL)
-  message(STATUS "Configuring build for serial execution")
-else()
+if(PARALLEL)
   message(STATUS "Configuring build for parallel execution")
+else()
+  message(STATUS "Configuring build for serial execution; configure with -DPARALLEL=1 for a parallel build")
 endif()
 
 # --- Generally useful CMake project options
```

example/cnn_mnist.f90

Lines changed: 2 additions & 3 deletions
```diff
@@ -40,9 +40,8 @@ program cnn_mnist
       optimizer=sgd(learning_rate=3.) &
     )
 
-    if (this_image() == 1) &
-      print '(a,i2,a,f5.2,a)', 'Epoch ', n, ' done, Accuracy: ', accuracy( &
-        net, validation_images, label_digits(validation_labels)) * 100, ' %'
+    print '(a,i2,a,f5.2,a)', 'Epoch ', n, ' done, Accuracy: ', accuracy( &
+      net, validation_images, label_digits(validation_labels)) * 100, ' %'
 
   end do epochs
 
```
example/dense_mnist.f90

Lines changed: 5 additions & 7 deletions
```diff
@@ -24,9 +24,8 @@ program dense_mnist
 
   call net % print_info()
 
-  if (this_image() == 1) &
-    print '(a,f5.2,a)', 'Initial accuracy: ', accuracy( &
-      net, validation_images, label_digits(validation_labels)) * 100, ' %'
+  print '(a,f5.2,a)', 'Initial accuracy: ', accuracy( &
+    net, validation_images, label_digits(validation_labels)) * 100, ' %'
 
   epochs: do n = 1, num_epochs
 
@@ -44,10 +43,9 @@ program dense_mnist
     ! 2 metrics; 1st is default loss function (quadratic), other is Pearson corr.
     output_metrics = net % evaluate(validation_images, label_digits(validation_labels), metric=corr())
     mean_metrics = sum(output_metrics, 1) / size(output_metrics, 1)
-    if (this_image() == 1) &
-      print '(a,i2,3(a,f6.3))', 'Epoch ', n, ' done, Accuracy: ', &
-        accuracy(net, validation_images, label_digits(validation_labels)) * 100, &
-        '%, Loss: ', mean_metrics(1), ', Pearson correlation: ', mean_metrics(2)
+    print '(a,i2,3(a,f6.3))', 'Epoch ', n, ' done, Accuracy: ', &
+      accuracy(net, validation_images, label_digits(validation_labels)) * 100, &
+      '%, Loss: ', mean_metrics(1), ', Pearson correlation: ', mean_metrics(2)
   end block
 
 end do epochs
```

src/nf/nf_activation.f90

Lines changed: 1 addition & 1 deletion
```diff
@@ -557,7 +557,7 @@ pure function eval_3d_celu_prime(self, x) result(res)
     end where
   end function
 
-  pure function get_activation_by_name(activation_name) result(res)
+  function get_activation_by_name(activation_name) result(res)
     ! Workaround to get activation_function with some
     ! hardcoded default parameters by its name.
     ! Need this function since we get only activation name
```

src/nf/nf_conv2d_layer.f90

Lines changed: 1 addition & 1 deletion
```diff
@@ -41,7 +41,7 @@ module nf_conv2d_layer
   end type conv2d_layer
 
   interface conv2d_layer
-    pure module function conv2d_layer_cons(filters, kernel_size, activation) &
+    module function conv2d_layer_cons(filters, kernel_size, activation) &
       result(res)
       !! `conv2d_layer` constructor function
       integer, intent(in) :: filters
```

src/nf/nf_conv2d_layer_submodule.f90

Lines changed: 1 addition & 1 deletion
```diff
@@ -7,7 +7,7 @@
 
 contains
 
-  pure module function conv2d_layer_cons(filters, kernel_size, activation) result(res)
+  module function conv2d_layer_cons(filters, kernel_size, activation) result(res)
     implicit none
     integer, intent(in) :: filters
    integer, intent(in) :: kernel_size
```

src/nf/nf_dense_layer.f90

Lines changed: 1 addition & 1 deletion
```diff
@@ -42,7 +42,7 @@ module nf_dense_layer
   end type dense_layer
 
   interface dense_layer
-    elemental module function dense_layer_cons(output_size, activation) &
+    module function dense_layer_cons(output_size, activation) &
      result(res)
      !! This function returns the `dense_layer` instance.
      integer, intent(in) :: output_size
```

src/nf/nf_dense_layer_submodule.f90

Lines changed: 3 additions & 1 deletion
```diff
@@ -8,7 +8,7 @@
 
 contains
 
-  elemental module function dense_layer_cons(output_size, activation) &
+  module function dense_layer_cons(output_size, activation) &
     result(res)
     integer, intent(in) :: output_size
     class(activation_function), intent(in) :: activation
@@ -129,7 +129,9 @@ module subroutine init(self, input_shape)
     self % weights = self % weights / self % input_size
 
     ! Broadcast weights to all other images, if any.
+#ifdef PARALLEL
     call co_broadcast(self % weights, 1)
+#endif
 
     allocate(self % biases(self % output_size))
     self % biases = 0
```
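The `#ifdef PARALLEL` guard above is only active when the new PARALLEL macro is defined; the CMake and fpm changes in this commit pass the preprocessor flag (`-cpp`/`-fpp`) and `-DPARALLEL` for parallel builds. A minimal, hypothetical sketch of the same pattern, independent of the neural-fortran sources:

```fortran
module broadcast_demo
  implicit none
contains
  subroutine broadcast_weights(weights)
    real, intent(inout) :: weights(:,:)
#ifdef PARALLEL
    ! In a coarray (parallel) build, copy image 1's weights to all images.
    call co_broadcast(weights, source_image=1)
#endif
    ! In a serial build (PARALLEL undefined) this subroutine is a no-op,
    ! so no coarray runtime is referenced.
  end subroutine broadcast_weights
end module broadcast_demo
```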

src/nf/nf_layer_constructors.f90

Lines changed: 7 additions & 7 deletions
```diff
@@ -12,7 +12,7 @@ module nf_layer_constructors
 
   interface input
 
-    pure module function input1d(layer_size) result(res)
+    module function input1d(layer_size) result(res)
       !! 1-d input layer constructor.
       !!
       !! This layer is for inputting 1-d data to the network.
@@ -35,7 +35,7 @@ pure module function input1d(layer_size) result(res)
       !! Resulting layer instance
     end function input1d
 
-    pure module function input3d(layer_shape) result(res)
+    module function input3d(layer_shape) result(res)
       !! 3-d input layer constructor.
       !!
       !! This layer is for inputting 3-d data to the network.
@@ -62,7 +62,7 @@ end function input3d
 
   interface
 
-    pure module function dense(layer_size, activation) result(res)
+    module function dense(layer_size, activation) result(res)
       !! Dense (fully-connected) layer constructor.
       !!
       !! This layer is a building block for dense, fully-connected networks,
@@ -85,7 +85,7 @@ pure module function dense(layer_size, activation) result(res)
       !! Resulting layer instance
     end function dense
 
-    pure module function flatten() result(res)
+    module function flatten() result(res)
      !! Flatten (3-d -> 1-d) layer constructor.
      !!
      !! Use this layer to chain layers with 3-d outputs to layers with 1-d
@@ -106,7 +106,7 @@ pure module function flatten() result(res)
       !! Resulting layer instance
     end function flatten
 
-    pure module function conv2d(filters, kernel_size, activation) result(res)
+    module function conv2d(filters, kernel_size, activation) result(res)
      !! 2-d convolutional layer constructor.
      !!
      !! This layer is for building 2-d convolutional network.
@@ -133,7 +133,7 @@ pure module function conv2d(filters, kernel_size, activation) result(res)
       !! Resulting layer instance
     end function conv2d
 
-    pure module function maxpool2d(pool_size, stride) result(res)
+    module function maxpool2d(pool_size, stride) result(res)
      !! 2-d maxpooling layer constructor.
      !!
      !! This layer is for downscaling other layers, typically `conv2d`.
@@ -155,7 +155,7 @@ pure module function maxpool2d(pool_size, stride) result(res)
       !! Resulting layer instance
     end function maxpool2d
 
-    pure module function reshape(output_shape) result(res)
+    module function reshape(output_shape) result(res)
      !! Rank-1 to rank-any reshape layer constructor.
      !! Currently implemented is only rank-3 for the output of the reshape.
      !!
```

src/nf/nf_layer_constructors_submodule.f90

Lines changed: 7 additions & 7 deletions
```diff
@@ -14,7 +14,7 @@
 
 contains
 
-  pure module function conv2d(filters, kernel_size, activation) result(res)
+  module function conv2d(filters, kernel_size, activation) result(res)
     integer, intent(in) :: filters
     integer, intent(in) :: kernel_size
     class(activation_function), intent(in), optional :: activation
@@ -40,7 +40,7 @@ pure module function conv2d(filters, kernel_size, activation) result(res)
   end function conv2d
 
 
-  pure module function dense(layer_size, activation) result(res)
+  module function dense(layer_size, activation) result(res)
    integer, intent(in) :: layer_size
    class(activation_function), intent(in), optional :: activation
    type(layer) :: res
@@ -63,14 +63,14 @@ pure module function dense(layer_size, activation) result(res)
   end function dense
 
 
-  pure module function flatten() result(res)
+  module function flatten() result(res)
     type(layer) :: res
     res % name = 'flatten'
     allocate(res % p, source=flatten_layer())
   end function flatten
 
 
-  pure module function input1d(layer_size) result(res)
+  module function input1d(layer_size) result(res)
     integer, intent(in) :: layer_size
     type(layer) :: res
     res % name = 'input'
@@ -81,7 +81,7 @@ pure module function input1d(layer_size) result(res)
   end function input1d
 
 
-  pure module function input3d(layer_shape) result(res)
+  module function input3d(layer_shape) result(res)
     integer, intent(in) :: layer_shape(3)
     type(layer) :: res
     res % name = 'input'
@@ -91,7 +91,7 @@ pure module function input3d(layer_shape) result(res)
     res % initialized = .true.
   end function input3d
 
-  pure module function maxpool2d(pool_size, stride) result(res)
+  module function maxpool2d(pool_size, stride) result(res)
     integer, intent(in) :: pool_size
     integer, intent(in), optional :: stride
     integer :: stride_
@@ -119,7 +119,7 @@ pure module function maxpool2d(pool_size, stride) result(res)
 
   end function maxpool2d
 
-  pure module function reshape(output_shape) result(res)
+  module function reshape(output_shape) result(res)
     integer, intent(in) :: output_shape(:)
     type(layer) :: res
 
```
src/nf/nf_metrics.f90

Lines changed: 1 addition & 1 deletion
```diff
@@ -36,7 +36,7 @@ end function metric_interface
 
 contains
 
-  pure module function corr_eval(true, predicted) result(res)
+  pure function corr_eval(true, predicted) result(res)
     !! Pearson correlation function:
     !!
     real, intent(in) :: true(:)
```
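Unlike the constructors above, `corr_eval` stays `pure`; only the `module` prefix is dropped, presumably because it is an ordinary contained procedure rather than a separate module procedure. For reference, a hypothetical, self-contained sketch of a pure Pearson correlation function with the same interface shape (`pearson_corr` is an invented name, not the library's implementation):

```fortran
pure function pearson_corr(true, predicted) result(res)
  implicit none
  real, intent(in) :: true(:)
  real, intent(in) :: predicted(:)
  real :: res
  real :: mean_t, mean_p
  mean_t = sum(true) / size(true)
  mean_p = sum(predicted) / size(predicted)
  ! Pearson correlation: covariance normalized by the two standard deviations.
  res = sum((true - mean_t) * (predicted - mean_p)) &
        / sqrt(sum((true - mean_t)**2) * sum((predicted - mean_p)**2))
end function pearson_corr
```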
