General Startup procedure of the Octopus Code

The flow chart for most calculations has many steps in common, most of them related to setting up the basic data structures of the code.

The ‘main’ program only performs tasks which are independent of the actual calculation and the physical system:


In a strongly simplified way, we have:

program main
  implicit none

  ! "constructors":           ! start code components

  call global_init()               ! initialize the mpi, clocks, etc.
  call parser_init()               ! initialize the input parser
  call messages_init()             ! initialize the message system
  call walltimer_init()            ! initialize the timer module
  call io_init()                   ! initialize the I/O subsystem
  call calc_mode_par_init()        ! initialize parallelization strategy
  call profiling_init()            ! initialize and start the profiling system

  call run(inp_calc_mode)          ! pass control to the 'actual code' running the calculation

  ! "destructors":            ! stop code components in reverse order

  call profiling_end()
  call calc_mode_par_end()
  call io_end()
  call walltimer_end()
  call messages_end()
  call parser_end()
  call global_end()

end program main

The actual calculation is started from the routine run(), which initializes the data structures representing the actual system and calculation mode, before starting the corresponding calculation:

main/run.F90 defines the module run_oct_m:

This module does not contain its own data (apart from constants).

Definition of run()


  !> Entry point for a ground-state calculation: dispatch on the dynamic
  !! type of the system object and hand off to the matching implementation.
  subroutine ground_state_run(system, from_scratch)
    class(*),        intent(inout) :: system
    logical,         intent(inout) :: from_scratch

    ! A `type is` guard always takes precedence over a `class is` guard
    ! when both match, so listing the electrons case first is equivalent.
    select type (system)
    type is (electrons_t)
      ! Only the legacy electrons implementation supports CalculationMode = gs.
      call ground_state_run_legacy(system, from_scratch)
    class is (multisystem_basic_t)
      message(1) = "CalculationMode = gs not implemented for multi-system calculations"
      call messages_fatal(1, namespace=system%namespace)
    end select

  end subroutine ground_state_run
  !> Thin adapter: unpack the components of the electrons container and
  !! forward them to the worker routine that performs the actual SCF run.
  subroutine ground_state_run_legacy(electrons, from_scratch)
    class(electrons_t), intent(inout) :: electrons
    logical,            intent(inout) :: from_scratch

    call electrons_ground_state_run(electrons%namespace, electrons%mc, electrons%gr, &
      electrons%ions, electrons%ext_partners, electrons%st, electrons%ks, electrons%hm, &
      electrons%outp, electrons%space, from_scratch)

  end subroutine ground_state_run_legacy
  !> Perform a ground-state calculation for an electronic system.
  !!
  !! Allocates the ground-state wave functions, optionally loads a previous
  !! state from restart files, runs the self-consistent field (SCF) cycle
  !! (or the RDMFT occupation/orbital optimization), and cleans up.
  !!
  !! \param namespace     input-file namespace used for messages and parsing
  !! \param mc            parallelization (multicommunicator) setup
  !! \param gr            real-space grid
  !! \param ions          ionic positions/species
  !! \param ext_partners  external interaction partners (lasers, etc.)
  !! \param st            electronic states (wave functions, occupations)
  !! \param ks            Kohn-Sham / theory-level container
  !! \param hm            electronic Hamiltonian
  !! \param outp          output settings
  !! \param space         physical space descriptor
  !! \param fromScratch   on input: request a fresh start; set to .true. here
  !!                      if restart files cannot be read
  subroutine electrons_ground_state_run(namespace, mc, gr, ions, ext_partners, st, ks, hm, outp, space, fromScratch)
    type(namespace_t),        intent(in)    :: namespace
    type(multicomm_t),        intent(in)    :: mc
    type(grid_t),             intent(inout) :: gr
    type(ions_t),             intent(inout) :: ions
    type(partner_list_t),     intent(in)    :: ext_partners
    type(states_elec_t),      intent(inout) :: st
    type(v_ks_t),             intent(inout) :: ks
    type(hamiltonian_elec_t), intent(inout) :: hm
    type(output_t),           intent(in)    :: outp
    type(space_t),            intent(in)    :: space
    logical,                  intent(inout) :: fromScratch

    type(scf_t)     :: scfv
    type(restart_t) :: restart_load, restart_dump
    integer         :: ierr
    type(rdm_t)     :: rdm
    logical         :: restart_init_dump


    call messages_write('Info: Allocating ground state wave-functions')
    call messages_info(namespace=namespace)

    if (st%parallel_in_states) then
      call messages_experimental('State parallelization for ground state calculations', namespace=namespace)
    end if

    if (hm%pcm%run_pcm) then
      if (hm%pcm%epsilon_infty /= hm%pcm%epsilon_0 .and. hm%pcm%tdlevel /= PCM_TD_EQ) then
        message(1) = 'Non-equilibrium PCM is not active in a time-independent run.'
        message(2) = 'You set epsilon_infty /= epsilon_0, but epsilon_infty is not relevant for CalculationMode = gs.'
        message(3) = 'By definition, the ground state is in equilibrium with the solvent.'
        message(4) = 'Therefore, the only relevant dielectric constant is the static one.'
        message(5) = 'Nevertheless, the dynamical PCM response matrix is evaluated for benchmarking purposes.'
        call messages_warning(5, namespace=namespace)
      end if
    end if

    call states_elec_allocate_wfns(st, gr%mesh, packed=.true.)

    ! sometimes a deadlock can occur here (if some nodes can allocate and other cannot)
    if (st%dom_st_kpt_mpi_grp%comm > 0) call st%dom_st_kpt_mpi_grp%barrier()
    call messages_write('Info: Ground-state allocation done.')
    call messages_info(namespace=namespace)

    if (.not. fromScratch) then
      ! load wavefunctions
      ! in RDMFT we need the full ground state
      call restart_init(restart_load, namespace, RESTART_GS, RESTART_TYPE_LOAD, mc, ierr, mesh=gr%mesh, &
        exact = (ks%theory_level == RDMFT))
      if (ierr == 0) then
        call states_elec_load(restart_load, namespace, space, st, gr%mesh, hm%kpoints, ierr)
      end if

      if (ierr /= 0) then
        ! fall back to a fresh start instead of aborting
        call messages_write("Unable to read wavefunctions.")
        call messages_new_line()
        call messages_write("Starting from scratch!")
        call messages_warning(namespace=namespace)
        fromScratch = .true.
      end if
    end if

    call write_canonicalized_xyz_file("exec", "initial_coordinates", ions, gr%box, namespace)

    if (ks%theory_level /= RDMFT) then
      call scf_init(scfv, namespace, gr, ions, st, mc, hm, ks, space)
      ! only initialize dumping restart files for more than one iteration
      restart_init_dump = scfv%max_iter > 0
    else
      ! the RDMFT solver always dumps restart information
      restart_init_dump = .true.
    end if

    if (fromScratch .and. ks%theory_level /= RDMFT) then
      call lcao_run(namespace, space, gr, ions, ext_partners, st, ks, hm, lmm_r = scfv%lmm_r)
      ! setup Hamiltonian
      call messages_write('Info: Setting up Hamiltonian.')
      call messages_info(namespace=namespace)
      call v_ks_h_setup(namespace, space, gr, ions, ext_partners, st, ks, hm, &
        calc_eigenval = .false., calc_current = .false.)
    end if

    if (restart_init_dump) then
      call restart_init(restart_dump, namespace, RESTART_GS, RESTART_TYPE_DUMP, mc, ierr, mesh=gr%mesh)
    end if

    ! run self-consistency
    call scf_state_info(namespace, st)

    if (st%d%pack_states .and. hamiltonian_elec_apply_packed(hm)) then
      call st%pack()
    end if

    ! self-consistency for occupation numbers and natural orbitals in RDMFT,
    ! otherwise the regular SCF cycle (with the proper restart combination)
    if (ks%theory_level == RDMFT) then
      call rdmft_init(rdm, namespace, gr, st, mc, space, fromScratch)
      call scf_rdmft(rdm, namespace, space, gr, ions, ext_partners, st, ks, hm, outp, restart_dump)
      call rdmft_end(rdm)
    else
      if (.not. fromScratch) then
        if (restart_init_dump) then
          call scf_run(scfv, namespace, space, mc, gr, ions, ext_partners, st, ks, hm, outp, &
            restart_load=restart_load, restart_dump=restart_dump)
        else
          call scf_run(scfv, namespace, space, mc, gr, ions, ext_partners, st, ks, hm, outp, restart_load=restart_load)
        end if
        call restart_end(restart_load)
      else
        if (restart_init_dump) then
          call scf_run(scfv, namespace, space, mc, gr, ions, ext_partners, st, ks, hm, outp, restart_dump=restart_dump)
        else
          call scf_run(scfv, namespace, space, mc, gr, ions, ext_partners, st, ks, hm, outp)
        end if
      end if

      call scf_end(scfv)
    end if

    if (restart_init_dump) then
      call restart_end(restart_dump)
    end if

    if (st%d%pack_states .and. hamiltonian_elec_apply_packed(hm)) then
      call st%unpack()
    end if

    ! clean up
    call states_elec_deallocate_wfns(st)

  end subroutine electrons_ground_state_run