A binary search might be quicker, depending on the cost of a failed read. I've tested the following changes, assuming 512-byte records.
program test
!
! Demonstration driver: build a direct-access test file and report
! its size in bytes as measured by file_size_bytes.
!
implicit none
character ifile*120
integer*4 bytes
integer*4, external :: file_size_bytes
!
ifile = 'test.ima'
!
! 7213 records of 2048 bytes each (see create_file) = 14,772,224 bytes
call create_file (ifile, 7213)
!
bytes = file_size_bytes (ifile)
!
write (*,fmt='(a,i0)') 'Number of bytes in file '//trim(ifile)//' = ', bytes
!
end program test
subroutine create_file (ifile, num)
!
! Create a direct-access file of 'num' records of 2048 bytes each,
! to exercise file_size_bytes.
! Demonstrates that the record size must only be a multiple of 512 bytes.
!
! ifile : name of the file to create
! num   : number of 2048-byte records to write
!
implicit none
character ifile*(*)
integer*4 num
intent(in) :: ifile, num
integer*4 i, ios, u
!
! newunit avoids clashing with any unit the caller already has open;
! the original captured iostat into the loop index and never checked it.
open (newunit=u, file=ifile, access='direct', recl=512*4, iostat=ios)
if (ios /= 0) then
   write (*,*) 'error creating ', trim (ifile)
   return
end if
! each record stores just its record number; the remainder of the
! 2048-byte record is unwritten filler
do i = 1,num
   write (u, rec=i) i
end do
close (u)
write (*,*) trim (ifile),' created'
end subroutine create_file
integer*4 function file_size_bytes (ifile)
!
! Determine the size of 'ifile' in bytes by probing it as a
! direct-access file of 512-byte records: double the probe record
! until a read fails, then binary-search for the highest readable
! record.  The file's true record size must be a multiple of
! 512 bytes, otherwise the trailing partial record is not counted.
!
! ifile : name of the file to measure
! Returns the size in bytes, or -1 if the file cannot be opened.
! NOTE(review): good*512 overflows integer*4 for files >= 2 GiB.
!
implicit none
character ifile*(*)
intent(in) :: ifile
!
integer*4 good, bad, next, i, ios, u
character string*64
!
open (newunit=u, file=ifile, access='direct', recl=512, iostat=ios)
if (ios /= 0) then
   print*,'error opening file'
   file_size_bytes = -1
   return
end if
!
good = 0      ! highest record known to be readable
bad = 0       ! lowest record known to be unreadable (0 = none seen yet)
!
do i = 1,huge(i)
   if (bad > 0) then
      next = good + (bad-good)/2 ! split the difference
      if (next == good) exit     ! search has converged on 'good'
   else
      next = max (good,512)*2    ! expand the search
   end if
   read (u,rec=next,iostat=ios) string
   if (ios == 0) then
      print*,i, ' testing', next,' is ok'
      good = next
   else
      print*,i,' testing', next,' is bad'
      bad = next
   end if
end do
!
write (*,fmt='(a,i0,a,i0,a)') 'Number of records: ',good,' ( ',i,' tests )'
file_size_bytes = good*512
close (u)
end function file_size_bytes