reference, declaration → definition
definition → references, declarations, derived classes, virtual overrides
reference to multiple definitions → definitions
unreferenced

References

projects/openmp/runtime/src/kmp.h
  223   kmp_int32 reserved_1; /**<  might be used in Fortran; see above  */
  224   kmp_int32 flags; /**<  also f.flags; KMP_IDENT_xxx flags; KMP_IDENT_KMPC
  226   kmp_int32 reserved_2; /**<  not really used in Fortran any more; see above */
  231   kmp_int32 reserved_3; /**< source[4] in Fortran, do not use for C++  */
  336 enum sched_type : kmp_int32 {
 1384 typedef kmp_int32 kmp_critical_name[8];
 1395 typedef void (*kmpc_micro)(kmp_int32 *global_tid, kmp_int32 *bound_tid, ...);
 1395 typedef void (*kmpc_micro)(kmp_int32 *global_tid, kmp_int32 *bound_tid, ...);
 1396 typedef void (*kmpc_micro_bound)(kmp_int32 *bound_tid, kmp_int32 *bound_nth,
 1396 typedef void (*kmpc_micro_bound)(kmp_int32 *bound_tid, kmp_int32 *bound_nth,
 1543   kmp_int32 count;
 1544   kmp_int32 ub;
 1546   kmp_int32 lb;
 1547   kmp_int32 st;
 1548   kmp_int32 tc;
 1549   kmp_int32 static_steal_counter; /* for static_steal only; maybe better to put
 1559     kmp_int32 parm1; //     structures in kmp_dispatch.cpp. This should
 1560     kmp_int32 parm2; //     make no real change at least while padding is off.
 1561     kmp_int32 parm3;
 1562     kmp_int32 parm4;
 1659   kmp_int32 ordered_bumped;
 1661   kmp_int32 ordered_dummy[KMP_MAX_ORDERED - 3];
 1664   kmp_int32 type_size; /* the size of types in private_info */
 1679   kmp_int32 ordered_dummy[KMP_MAX_ORDERED - 1];
 1698   volatile kmp_int32 doacross_buf_idx; // teamwise index
 1700   kmp_int32 doacross_num_done; // count finished threads
 1722   kmp_int32 th_disp_index;
 1723   kmp_int32 th_doacross_buf_idx; // thread's doacross buffer index
 1818   kmp_int32 default_device; /* internal control for default device */
 1841   kmp_int32 parent_tid;
 1842   kmp_int32 old_tid;
 2065 extern kmp_int32 __kmp_default_device; // Set via OMP_DEFAULT_DEVICE if
 2068 extern kmp_int32 __kmp_max_task_priority;
 2088 typedef kmp_int32 (*kmp_routine_entry_t)(kmp_int32, void *);
 2088 typedef kmp_int32 (*kmp_routine_entry_t)(kmp_int32, void *);
 2091   kmp_int32 priority; /**< priority specified by user for the task */
 2105   kmp_int32 part_id; /**< part id for the task                          */
 2118   std::atomic<kmp_int32> count; // number of allocated and incomplete tasks
 2119   std::atomic<kmp_int32>
 2124   kmp_int32 reduce_num_data; // number of data items to reduce
 2156   kmp_int32 mtx_num_locks; /* number of locks in mtx_locks array */
 2161   std::atomic<kmp_int32> npredecessors;
 2162   std::atomic<kmp_int32> nrefs;
 2176   kmp_int32 last_flag;
 2195     kmp_int32 reserved : 30;
 2263   kmp_int32 td_task_id; /* id, assigned by debugger                */
 2269   kmp_int32 td_level; /* task nesting level                      */
 2270   std::atomic<kmp_int32> td_untied_count; // untied task active parts counter
 2275   kmp_int32 td_taskwait_thread; /* gtid + 1 of thread encountered taskwait */
 2278   KMP_ALIGN_CACHE std::atomic<kmp_int32>
 2281   std::atomic<kmp_int32>
 2290   kmp_int32 td_size_alloc; // The size of task structure, including shareds etc.
 2293   kmp_int32 td_size_loop_bounds;
 2317   kmp_int32 td_deque_size; // Size of deck
 2320   kmp_int32 td_deque_ntasks; // Number of tasks in deque
 2322   kmp_int32 td_deque_last_stolen; // Thread number of last successful steal
 2350   kmp_int32 tt_found_tasks; /* Have we found tasks and queued them while
 2353   kmp_int32 tt_nproc; /* #threads in team           */
 2354   kmp_int32 tt_max_threads; // # entries allocated for threads_data array
 2355   kmp_int32 tt_found_proxy_tasks; // found proxy tasks since last barrier
 2356   kmp_int32 tt_untied_task_encountered;
 2359   std::atomic<kmp_int32> tt_unfinished_threads; /* #threads still active */
 2388   kmp_int32 hot_team_nth; // number of threads allocated for the hot_team
 2392   kmp_int32 nteams; // number of teams in a league
 2393   kmp_int32 nth; // number of threads in each team of the league
 2411   kmp_int32 cg_thread_limit;
 2412   kmp_int32 cg_nthreads; // Count of active threads in CG rooted at cg_root
 2525   KMP_ALIGN_CACHE volatile kmp_int32
 2684   std::atomic<kmp_int32> t_cancel_request;
 2749   kmp_int32 data;
 2892 extern kmp_int32 __kmp_use_yield;
 2893 extern kmp_int32 __kmp_use_yield_exp_set;
 3091 extern std::atomic<kmp_int32> __kmp_team_counter;
 3093 extern std::atomic<kmp_int32> __kmp_task_counter;
 3218 extern void __kmpc_dispatch_init_4(ident_t *loc, kmp_int32 gtid,
 3219                                    enum sched_type schedule, kmp_int32 lb,
 3220                                    kmp_int32 ub, kmp_int32 st, kmp_int32 chunk);
 3220                                    kmp_int32 ub, kmp_int32 st, kmp_int32 chunk);
 3220                                    kmp_int32 ub, kmp_int32 st, kmp_int32 chunk);
 3221 extern void __kmpc_dispatch_init_4u(ident_t *loc, kmp_int32 gtid,
 3223                                     kmp_uint32 ub, kmp_int32 st,
 3224                                     kmp_int32 chunk);
 3225 extern void __kmpc_dispatch_init_8(ident_t *loc, kmp_int32 gtid,
 3228 extern void __kmpc_dispatch_init_8u(ident_t *loc, kmp_int32 gtid,
 3233 extern int __kmpc_dispatch_next_4(ident_t *loc, kmp_int32 gtid,
 3234                                   kmp_int32 *p_last, kmp_int32 *p_lb,
 3234                                   kmp_int32 *p_last, kmp_int32 *p_lb,
 3235                                   kmp_int32 *p_ub, kmp_int32 *p_st);
 3235                                   kmp_int32 *p_ub, kmp_int32 *p_st);
 3236 extern int __kmpc_dispatch_next_4u(ident_t *loc, kmp_int32 gtid,
 3237                                    kmp_int32 *p_last, kmp_uint32 *p_lb,
 3238                                    kmp_uint32 *p_ub, kmp_int32 *p_st);
 3239 extern int __kmpc_dispatch_next_8(ident_t *loc, kmp_int32 gtid,
 3240                                   kmp_int32 *p_last, kmp_int64 *p_lb,
 3242 extern int __kmpc_dispatch_next_8u(ident_t *loc, kmp_int32 gtid,
 3243                                    kmp_int32 *p_last, kmp_uint64 *p_lb,
 3246 extern void __kmpc_dispatch_fini_4(ident_t *loc, kmp_int32 gtid);
 3247 extern void __kmpc_dispatch_fini_8(ident_t *loc, kmp_int32 gtid);
 3248 extern void __kmpc_dispatch_fini_4u(ident_t *loc, kmp_int32 gtid);
 3249 extern void __kmpc_dispatch_fini_8u(ident_t *loc, kmp_int32 gtid);
 3253 extern void __kmp_aux_dispatch_init_4(ident_t *loc, kmp_int32 gtid,
 3254                                       enum sched_type schedule, kmp_int32 lb,
 3255                                       kmp_int32 ub, kmp_int32 st,
 3255                                       kmp_int32 ub, kmp_int32 st,
 3256                                       kmp_int32 chunk, int push_ws);
 3257 extern void __kmp_aux_dispatch_init_4u(ident_t *loc, kmp_int32 gtid,
 3259                                        kmp_uint32 ub, kmp_int32 st,
 3260                                        kmp_int32 chunk, int push_ws);
 3261 extern void __kmp_aux_dispatch_init_8(ident_t *loc, kmp_int32 gtid,
 3265 extern void __kmp_aux_dispatch_init_8u(ident_t *loc, kmp_int32 gtid,
 3269 extern void __kmp_aux_dispatch_fini_chunk_4(ident_t *loc, kmp_int32 gtid);
 3270 extern void __kmp_aux_dispatch_fini_chunk_8(ident_t *loc, kmp_int32 gtid);
 3271 extern void __kmp_aux_dispatch_fini_chunk_4u(ident_t *loc, kmp_int32 gtid);
 3272 extern void __kmp_aux_dispatch_fini_chunk_8u(ident_t *loc, kmp_int32 gtid);
 3465                            enum fork_context_e fork_context, kmp_int32 argc,
 3483 extern void __kmp_serialized_parallel(ident_t *id, kmp_int32 gtid);
 3516 extern kmp_task_t *__kmp_task_alloc(ident_t *loc_ref, kmp_int32 gtid,
 3532 int __kmp_execute_tasks_32(kmp_info_t *thread, kmp_int32 gtid,
 3538                            kmp_int32 is_constrained);
 3539 int __kmp_execute_tasks_64(kmp_info_t *thread, kmp_int32 gtid,
 3545                            kmp_int32 is_constrained);
 3546 int __kmp_execute_tasks_oncore(kmp_info_t *thread, kmp_int32 gtid,
 3552                                kmp_int32 is_constrained);
 3593 KMP_EXPORT void __kmpc_begin(ident_t *, kmp_int32 flags);
 3604 KMP_EXPORT void *__kmpc_threadprivate(ident_t *, kmp_int32 global_tid,
 3607 KMP_EXPORT kmp_int32 __kmpc_global_thread_num(ident_t *);
 3608 KMP_EXPORT kmp_int32 __kmpc_global_num_threads(ident_t *);
 3609 KMP_EXPORT kmp_int32 __kmpc_bound_thread_num(ident_t *);
 3610 KMP_EXPORT kmp_int32 __kmpc_bound_num_threads(ident_t *);
 3612 KMP_EXPORT kmp_int32 __kmpc_ok_to_fork(ident_t *);
 3613 KMP_EXPORT void __kmpc_fork_call(ident_t *, kmp_int32 nargs,
 3616 KMP_EXPORT void __kmpc_serialized_parallel(ident_t *, kmp_int32 global_tid);
 3617 KMP_EXPORT void __kmpc_end_serialized_parallel(ident_t *, kmp_int32 global_tid);
 3620 KMP_EXPORT void __kmpc_barrier(ident_t *, kmp_int32 global_tid);
 3621 KMP_EXPORT kmp_int32 __kmpc_master(ident_t *, kmp_int32 global_tid);
 3621 KMP_EXPORT kmp_int32 __kmpc_master(ident_t *, kmp_int32 global_tid);
 3622 KMP_EXPORT void __kmpc_end_master(ident_t *, kmp_int32 global_tid);
 3623 KMP_EXPORT void __kmpc_ordered(ident_t *, kmp_int32 global_tid);
 3624 KMP_EXPORT void __kmpc_end_ordered(ident_t *, kmp_int32 global_tid);
 3625 KMP_EXPORT void __kmpc_critical(ident_t *, kmp_int32 global_tid,
 3627 KMP_EXPORT void __kmpc_end_critical(ident_t *, kmp_int32 global_tid,
 3629 KMP_EXPORT void __kmpc_critical_with_hint(ident_t *, kmp_int32 global_tid,
 3632 KMP_EXPORT kmp_int32 __kmpc_barrier_master(ident_t *, kmp_int32 global_tid);
 3632 KMP_EXPORT kmp_int32 __kmpc_barrier_master(ident_t *, kmp_int32 global_tid);
 3633 KMP_EXPORT void __kmpc_end_barrier_master(ident_t *, kmp_int32 global_tid);
 3635 KMP_EXPORT kmp_int32 __kmpc_barrier_master_nowait(ident_t *,
 3636                                                   kmp_int32 global_tid);
 3638 KMP_EXPORT kmp_int32 __kmpc_single(ident_t *, kmp_int32 global_tid);
 3638 KMP_EXPORT kmp_int32 __kmpc_single(ident_t *, kmp_int32 global_tid);
 3639 KMP_EXPORT void __kmpc_end_single(ident_t *, kmp_int32 global_tid);
 3641 KMP_EXPORT void KMPC_FOR_STATIC_INIT(ident_t *loc, kmp_int32 global_tid,
 3642                                      kmp_int32 schedtype, kmp_int32 *plastiter,
 3642                                      kmp_int32 schedtype, kmp_int32 *plastiter,
 3647 KMP_EXPORT void __kmpc_for_static_fini(ident_t *loc, kmp_int32 global_tid);
 3649 KMP_EXPORT void __kmpc_copyprivate(ident_t *loc, kmp_int32 global_tid,
 3652                                    kmp_int32 didit);
 3659 KMP_EXPORT kmp_int32 __kmpc_omp_task(ident_t *loc_ref, kmp_int32 gtid,
 3659 KMP_EXPORT kmp_int32 __kmpc_omp_task(ident_t *loc_ref, kmp_int32 gtid,
 3661 KMP_EXPORT kmp_task_t *__kmpc_omp_task_alloc(ident_t *loc_ref, kmp_int32 gtid,
 3662                                              kmp_int32 flags,
 3666 KMP_EXPORT kmp_task_t *__kmpc_omp_target_task_alloc(ident_t *loc_ref, kmp_int32 gtid,
 3667                                                     kmp_int32 flags,
 3672 KMP_EXPORT void __kmpc_omp_task_begin_if0(ident_t *loc_ref, kmp_int32 gtid,
 3674 KMP_EXPORT void __kmpc_omp_task_complete_if0(ident_t *loc_ref, kmp_int32 gtid,
 3676 KMP_EXPORT kmp_int32 __kmpc_omp_task_parts(ident_t *loc_ref, kmp_int32 gtid,
 3676 KMP_EXPORT kmp_int32 __kmpc_omp_task_parts(ident_t *loc_ref, kmp_int32 gtid,
 3678 KMP_EXPORT kmp_int32 __kmpc_omp_taskwait(ident_t *loc_ref, kmp_int32 gtid);
 3678 KMP_EXPORT kmp_int32 __kmpc_omp_taskwait(ident_t *loc_ref, kmp_int32 gtid);
 3680 KMP_EXPORT kmp_int32 __kmpc_omp_taskyield(ident_t *loc_ref, kmp_int32 gtid,
 3680 KMP_EXPORT kmp_int32 __kmpc_omp_taskyield(ident_t *loc_ref, kmp_int32 gtid,
 3694 KMP_EXPORT kmp_int32 __kmpc_omp_task_with_deps(
 3695     ident_t *loc_ref, kmp_int32 gtid, kmp_task_t *new_task, kmp_int32 ndeps,
 3695     ident_t *loc_ref, kmp_int32 gtid, kmp_task_t *new_task, kmp_int32 ndeps,
 3696     kmp_depend_info_t *dep_list, kmp_int32 ndeps_noalias,
 3698 KMP_EXPORT void __kmpc_omp_wait_deps(ident_t *loc_ref, kmp_int32 gtid,
 3699                                      kmp_int32 ndeps,
 3701                                      kmp_int32 ndeps_noalias,
 3703 extern kmp_int32 __kmp_omp_task(kmp_int32 gtid, kmp_task_t *new_task,
 3703 extern kmp_int32 __kmp_omp_task(kmp_int32 gtid, kmp_task_t *new_task,
 3706 KMP_EXPORT kmp_int32 __kmpc_cancel(ident_t *loc_ref, kmp_int32 gtid,
 3706 KMP_EXPORT kmp_int32 __kmpc_cancel(ident_t *loc_ref, kmp_int32 gtid,
 3707                                    kmp_int32 cncl_kind);
 3708 KMP_EXPORT kmp_int32 __kmpc_cancellationpoint(ident_t *loc_ref, kmp_int32 gtid,
 3708 KMP_EXPORT kmp_int32 __kmpc_cancellationpoint(ident_t *loc_ref, kmp_int32 gtid,
 3709                                               kmp_int32 cncl_kind);
 3710 KMP_EXPORT kmp_int32 __kmpc_cancel_barrier(ident_t *loc_ref, kmp_int32 gtid);
 3710 KMP_EXPORT kmp_int32 __kmpc_cancel_barrier(ident_t *loc_ref, kmp_int32 gtid);
 3713 KMP_EXPORT void __kmpc_proxy_task_completed(kmp_int32 gtid, kmp_task_t *ptask);
 3715 KMP_EXPORT void __kmpc_taskloop(ident_t *loc, kmp_int32 gtid, kmp_task_t *task,
 3716                                 kmp_int32 if_val, kmp_uint64 *lb,
 3717                                 kmp_uint64 *ub, kmp_int64 st, kmp_int32 nogroup,
 3718                                 kmp_int32 sched, kmp_uint64 grainsize,
 3730 KMP_EXPORT kmp_int32 __kmpc_omp_reg_task_with_affinity(
 3731     ident_t *loc_ref, kmp_int32 gtid, kmp_task_t *new_task, kmp_int32 naffins,
 3731     ident_t *loc_ref, kmp_int32 gtid, kmp_task_t *new_task, kmp_int32 naffins,
 3735 KMP_EXPORT void __kmpc_init_lock(ident_t *loc, kmp_int32 gtid,
 3737 KMP_EXPORT void __kmpc_init_nest_lock(ident_t *loc, kmp_int32 gtid,
 3739 KMP_EXPORT void __kmpc_destroy_lock(ident_t *loc, kmp_int32 gtid,
 3741 KMP_EXPORT void __kmpc_destroy_nest_lock(ident_t *loc, kmp_int32 gtid,
 3743 KMP_EXPORT void __kmpc_set_lock(ident_t *loc, kmp_int32 gtid, void **user_lock);
 3744 KMP_EXPORT void __kmpc_set_nest_lock(ident_t *loc, kmp_int32 gtid,
 3746 KMP_EXPORT void __kmpc_unset_lock(ident_t *loc, kmp_int32 gtid,
 3748 KMP_EXPORT void __kmpc_unset_nest_lock(ident_t *loc, kmp_int32 gtid,
 3750 KMP_EXPORT int __kmpc_test_lock(ident_t *loc, kmp_int32 gtid, void **user_lock);
 3751 KMP_EXPORT int __kmpc_test_nest_lock(ident_t *loc, kmp_int32 gtid,
 3754 KMP_EXPORT void __kmpc_init_lock_with_hint(ident_t *loc, kmp_int32 gtid,
 3756 KMP_EXPORT void __kmpc_init_nest_lock_with_hint(ident_t *loc, kmp_int32 gtid,
 3762 KMP_EXPORT kmp_int32 __kmpc_reduce_nowait(
 3763     ident_t *loc, kmp_int32 global_tid, kmp_int32 num_vars, size_t reduce_size,
 3763     ident_t *loc, kmp_int32 global_tid, kmp_int32 num_vars, size_t reduce_size,
 3766 KMP_EXPORT void __kmpc_end_reduce_nowait(ident_t *loc, kmp_int32 global_tid,
 3768 KMP_EXPORT kmp_int32 __kmpc_reduce(
 3769     ident_t *loc, kmp_int32 global_tid, kmp_int32 num_vars, size_t reduce_size,
 3769     ident_t *loc, kmp_int32 global_tid, kmp_int32 num_vars, size_t reduce_size,
 3772 KMP_EXPORT void __kmpc_end_reduce(ident_t *loc, kmp_int32 global_tid,
 3778     ident_t *loc, kmp_int32 global_tid, kmp_int32 num_vars, size_t reduce_size,
 3778     ident_t *loc, kmp_int32 global_tid, kmp_int32 num_vars, size_t reduce_size,
 3783 KMP_EXPORT kmp_int32 __kmp_get_reduce_method(void);
 3791 KMP_EXPORT kmp_int32 __kmpc_in_parallel(ident_t *loc);
 3792 KMP_EXPORT void __kmpc_pop_num_threads(ident_t *loc, kmp_int32 global_tid);
 3793 KMP_EXPORT void __kmpc_push_num_threads(ident_t *loc, kmp_int32 global_tid,
 3794                                         kmp_int32 num_threads);
 3796 KMP_EXPORT void __kmpc_push_proc_bind(ident_t *loc, kmp_int32 global_tid,
 3798 KMP_EXPORT void __kmpc_push_num_teams(ident_t *loc, kmp_int32 global_tid,
 3799                                       kmp_int32 num_teams,
 3800                                       kmp_int32 num_threads);
 3801 KMP_EXPORT void __kmpc_fork_teams(ident_t *loc, kmp_int32 argc,
 3808 KMP_EXPORT void __kmpc_doacross_init(ident_t *loc, kmp_int32 gtid,
 3809                                      kmp_int32 num_dims,
 3811 KMP_EXPORT void __kmpc_doacross_wait(ident_t *loc, kmp_int32 gtid,
 3813 KMP_EXPORT void __kmpc_doacross_post(ident_t *loc, kmp_int32 gtid,
 3815 KMP_EXPORT void __kmpc_doacross_fini(ident_t *loc, kmp_int32 gtid);
 3817 KMP_EXPORT void *__kmpc_threadprivate_cached(ident_t *loc, kmp_int32 global_tid,
projects/openmp/runtime/src/kmp_atomic.cpp
 3434     kmp_int32 old_value, new_value;
projects/openmp/runtime/src/kmp_atomic.h
  362                                              kmp_int32 gtid) {
  382                                          kmp_int32 gtid) {
  387                                              kmp_int32 gtid) {
  477 void __kmpc_atomic_fixed4_add(ident_t *id_ref, int gtid, kmp_int32 *lhs,
  478                               kmp_int32 rhs);
  479 void __kmpc_atomic_fixed4_sub(ident_t *id_ref, int gtid, kmp_int32 *lhs,
  480                               kmp_int32 rhs);
  497 void __kmpc_atomic_fixed4_andb(ident_t *id_ref, int gtid, kmp_int32 *lhs,
  498                                kmp_int32 rhs);
  499 void __kmpc_atomic_fixed4_div(ident_t *id_ref, int gtid, kmp_int32 *lhs,
  500                               kmp_int32 rhs);
  503 void __kmpc_atomic_fixed4_mul(ident_t *id_ref, int gtid, kmp_int32 *lhs,
  504                               kmp_int32 rhs);
  505 void __kmpc_atomic_fixed4_orb(ident_t *id_ref, int gtid, kmp_int32 *lhs,
  506                               kmp_int32 rhs);
  507 void __kmpc_atomic_fixed4_shl(ident_t *id_ref, int gtid, kmp_int32 *lhs,
  508                               kmp_int32 rhs);
  509 void __kmpc_atomic_fixed4_shr(ident_t *id_ref, int gtid, kmp_int32 *lhs,
  510                               kmp_int32 rhs);
  513 void __kmpc_atomic_fixed4_xor(ident_t *id_ref, int gtid, kmp_int32 *lhs,
  514                               kmp_int32 rhs);
  550 void __kmpc_atomic_fixed4_andl(ident_t *id_ref, int gtid, kmp_int32 *lhs,
  551                                kmp_int32 rhs);
  552 void __kmpc_atomic_fixed4_orl(ident_t *id_ref, int gtid, kmp_int32 *lhs,
  553                               kmp_int32 rhs);
  563 void __kmpc_atomic_fixed4_max(ident_t *id_ref, int gtid, kmp_int32 *lhs,
  564                               kmp_int32 rhs);
  565 void __kmpc_atomic_fixed4_min(ident_t *id_ref, int gtid, kmp_int32 *lhs,
  566                               kmp_int32 rhs);
  597 void __kmpc_atomic_fixed4_neqv(ident_t *id_ref, int gtid, kmp_int32 *lhs,
  598                                kmp_int32 rhs);
  604 void __kmpc_atomic_fixed4_eqv(ident_t *id_ref, int gtid, kmp_int32 *lhs,
  605                               kmp_int32 rhs);
  718 void __kmpc_atomic_fixed4_sub_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs,
  719                                   kmp_int32 rhs);
  720 void __kmpc_atomic_fixed4_div_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs,
  721                                   kmp_int32 rhs);
  724 void __kmpc_atomic_fixed4_shl_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs,
  725                                   kmp_int32 rhs);
  726 void __kmpc_atomic_fixed4_shr_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs,
  727                                   kmp_int32 rhs);
  805 void __kmpc_atomic_fixed4_mul_float8(ident_t *id_ref, int gtid, kmp_int32 *lhs,
  807 void __kmpc_atomic_fixed4_div_float8(ident_t *id_ref, int gtid, kmp_int32 *lhs,
 1002 kmp_int32 __kmpc_atomic_fixed4_rd(ident_t *id_ref, int gtid, kmp_int32 *loc);
 1002 kmp_int32 __kmpc_atomic_fixed4_rd(ident_t *id_ref, int gtid, kmp_int32 *loc);
 1040 void __kmpc_atomic_fixed4_wr(ident_t *id_ref, int gtid, kmp_int32 *lhs,
 1041                              kmp_int32 rhs);
 1126 kmp_int32 __kmpc_atomic_fixed4_add_cpt(ident_t *id_ref, int gtid,
 1127                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1127                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1128 kmp_int32 __kmpc_atomic_fixed4_sub_cpt(ident_t *id_ref, int gtid,
 1129                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1129                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1150 kmp_int32 __kmpc_atomic_fixed4_andb_cpt(ident_t *id_ref, int gtid,
 1151                                         kmp_int32 *lhs, kmp_int32 rhs,
 1151                                         kmp_int32 *lhs, kmp_int32 rhs,
 1153 kmp_int32 __kmpc_atomic_fixed4_div_cpt(ident_t *id_ref, int gtid,
 1154                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1154                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1158 kmp_int32 __kmpc_atomic_fixed4_mul_cpt(ident_t *id_ref, int gtid,
 1159                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1159                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1160 kmp_int32 __kmpc_atomic_fixed4_orb_cpt(ident_t *id_ref, int gtid,
 1161                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1161                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1162 kmp_int32 __kmpc_atomic_fixed4_shl_cpt(ident_t *id_ref, int gtid,
 1163                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1163                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1164 kmp_int32 __kmpc_atomic_fixed4_shr_cpt(ident_t *id_ref, int gtid,
 1165                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1165                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1169 kmp_int32 __kmpc_atomic_fixed4_xor_cpt(ident_t *id_ref, int gtid,
 1170                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1170                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1216 kmp_int32 __kmpc_atomic_fixed4_andl_cpt(ident_t *id_ref, int gtid,
 1217                                         kmp_int32 *lhs, kmp_int32 rhs,
 1217                                         kmp_int32 *lhs, kmp_int32 rhs,
 1219 kmp_int32 __kmpc_atomic_fixed4_orl_cpt(ident_t *id_ref, int gtid,
 1220                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1220                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1235 kmp_int32 __kmpc_atomic_fixed4_max_cpt(ident_t *id_ref, int gtid,
 1236                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1236                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1237 kmp_int32 __kmpc_atomic_fixed4_min_cpt(ident_t *id_ref, int gtid,
 1238                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1238                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1268 kmp_int32 __kmpc_atomic_fixed4_neqv_cpt(ident_t *id_ref, int gtid,
 1269                                         kmp_int32 *lhs, kmp_int32 rhs,
 1269                                         kmp_int32 *lhs, kmp_int32 rhs,
 1279 kmp_int32 __kmpc_atomic_fixed4_eqv_cpt(ident_t *id_ref, int gtid,
 1280                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1280                                        kmp_int32 *lhs, kmp_int32 rhs, int flag);
 1433 kmp_int32 __kmpc_atomic_fixed4_sub_cpt_rev(ident_t *id_ref, int gtid,
 1434                                            kmp_int32 *lhs, kmp_int32 rhs,
 1434                                            kmp_int32 *lhs, kmp_int32 rhs,
 1436 kmp_int32 __kmpc_atomic_fixed4_div_cpt_rev(ident_t *id_ref, int gtid,
 1437                                            kmp_int32 *lhs, kmp_int32 rhs,
 1437                                            kmp_int32 *lhs, kmp_int32 rhs,
 1442 kmp_int32 __kmpc_atomic_fixed4_shl_cpt_rev(ident_t *id_ref, int gtid,
 1443                                            kmp_int32 *lhs, kmp_int32 rhs,
 1443                                            kmp_int32 *lhs, kmp_int32 rhs,
 1445 kmp_int32 __kmpc_atomic_fixed4_shr_cpt_rev(ident_t *id_ref, int gtid,
 1446                                            kmp_int32 *lhs, kmp_int32 rhs,
 1446                                            kmp_int32 *lhs, kmp_int32 rhs,
 1540 kmp_int32 __kmpc_atomic_fixed4_swp(ident_t *id_ref, int gtid, kmp_int32 *lhs,
 1540 kmp_int32 __kmpc_atomic_fixed4_swp(ident_t *id_ref, int gtid, kmp_int32 *lhs,
 1541                                    kmp_int32 rhs);
projects/openmp/runtime/src/kmp_barrier.cpp
  369     kmp_int32 parent_tid = (tid - 1) >> branch_bits;
  546       kmp_int32 parent_tid = tid & ~((1 << (level + branch_bits)) - 1);
  897     kmp_int32 child_tid;
 1152     kmp_int32 child_tid;
 1425         kmp_int32 cancel_request = KMP_ATOMIC_LD_RLX(&team->t.t_cancel_request);
projects/openmp/runtime/src/kmp_cancel.cpp
   29 kmp_int32 __kmpc_cancel(ident_t *loc_ref, kmp_int32 gtid, kmp_int32 cncl_kind) {
   29 kmp_int32 __kmpc_cancel(ident_t *loc_ref, kmp_int32 gtid, kmp_int32 cncl_kind) {
   29 kmp_int32 __kmpc_cancel(ident_t *loc_ref, kmp_int32 gtid, kmp_int32 cncl_kind) {
   51         kmp_int32 old = cancel_noreq;
   89           kmp_int32 old = cancel_noreq;
  135 kmp_int32 __kmpc_cancellationpoint(ident_t *loc_ref, kmp_int32 gtid,
  135 kmp_int32 __kmpc_cancellationpoint(ident_t *loc_ref, kmp_int32 gtid,
  136                                    kmp_int32 cncl_kind) {
  243 kmp_int32 __kmpc_cancel_barrier(ident_t *loc, kmp_int32 gtid) {
  243 kmp_int32 __kmpc_cancel_barrier(ident_t *loc, kmp_int32 gtid) {
projects/openmp/runtime/src/kmp_csupport.cpp
   39 void __kmpc_begin(ident_t *loc, kmp_int32 flags) {
  101 kmp_int32 __kmpc_global_thread_num(ident_t *loc) {
  102   kmp_int32 gtid = __kmp_entry_gtid();
  123 kmp_int32 __kmpc_global_num_threads(ident_t *loc) {
  136 kmp_int32 __kmpc_bound_thread_num(ident_t *loc) {
  146 kmp_int32 __kmpc_bound_num_threads(ident_t *loc) {
  158 kmp_int32 __kmpc_ok_to_fork(ident_t *loc) {
  220 kmp_int32 __kmpc_in_parallel(ident_t *loc) {
  233 void __kmpc_push_num_threads(ident_t *loc, kmp_int32 global_tid,
  234                              kmp_int32 num_threads) {
  241 void __kmpc_pop_num_threads(ident_t *loc, kmp_int32 global_tid) {
  247 void __kmpc_push_proc_bind(ident_t *loc, kmp_int32 global_tid,
  248                            kmp_int32 proc_bind) {
  265 void __kmpc_fork_call(ident_t *loc, kmp_int32 argc, kmpc_micro microtask, ...) {
  354 void __kmpc_push_num_teams(ident_t *loc, kmp_int32 global_tid,
  355                            kmp_int32 num_teams, kmp_int32 num_threads) {
  355                            kmp_int32 num_teams, kmp_int32 num_threads) {
  373 void __kmpc_fork_teams(ident_t *loc, kmp_int32 argc, kmpc_micro microtask,
  479 void __kmpc_serialized_parallel(ident_t *loc, kmp_int32 global_tid) {
  496 void __kmpc_end_serialized_parallel(ident_t *loc, kmp_int32 global_tid) {
  716 void __kmpc_barrier(ident_t *loc, kmp_int32 global_tid) {
  764 kmp_int32 __kmpc_master(ident_t *loc, kmp_int32 global_tid) {
  764 kmp_int32 __kmpc_master(ident_t *loc, kmp_int32 global_tid) {
  820 void __kmpc_end_master(ident_t *loc, kmp_int32 global_tid) {
  854 void __kmpc_ordered(ident_t *loc, kmp_int32 gtid) {
  926 void __kmpc_end_ordered(ident_t *loc, kmp_int32 gtid) {
  960                           kmp_int32 gtid, kmp_indirect_locktag_t tag) {
 1160 void __kmpc_critical(ident_t *loc, kmp_int32 global_tid,
 1384 void __kmpc_critical_with_hint(ident_t *loc, kmp_int32 global_tid,
 1515 void __kmpc_end_critical(ident_t *loc, kmp_int32 global_tid,
 1611 kmp_int32 __kmpc_barrier_master(ident_t *loc, kmp_int32 global_tid) {
 1611 kmp_int32 __kmpc_barrier_master(ident_t *loc, kmp_int32 global_tid) {
 1655 void __kmpc_end_barrier_master(ident_t *loc, kmp_int32 global_tid) {
 1671 kmp_int32 __kmpc_barrier_master_nowait(ident_t *loc, kmp_int32 global_tid) {
 1671 kmp_int32 __kmpc_barrier_master_nowait(ident_t *loc, kmp_int32 global_tid) {
 1672   kmp_int32 ret;
 1739 kmp_int32 __kmpc_single(ident_t *loc, kmp_int32 global_tid) {
 1739 kmp_int32 __kmpc_single(ident_t *loc, kmp_int32 global_tid) {
 1740   kmp_int32 rc = __kmp_enter_single(global_tid, loc, TRUE);
 1791 void __kmpc_end_single(ident_t *loc, kmp_int32 global_tid) {
 1817 void __kmpc_for_static_fini(ident_t *loc, kmp_int32 global_tid) {
 2067 void __kmpc_copyprivate(ident_t *loc, kmp_int32 gtid, size_t cpy_size,
 2069                         kmp_int32 didit) {
 2203 void __kmpc_init_lock_with_hint(ident_t *loc, kmp_int32 gtid, void **user_lock,
 2227 void __kmpc_init_nest_lock_with_hint(ident_t *loc, kmp_int32 gtid,
 2253 void __kmpc_init_lock(ident_t *loc, kmp_int32 gtid, void **user_lock) {
 2325 void __kmpc_init_nest_lock(ident_t *loc, kmp_int32 gtid, void **user_lock) {
 2399 void __kmpc_destroy_lock(ident_t *loc, kmp_int32 gtid, void **user_lock) {
 2478 void __kmpc_destroy_nest_lock(ident_t *loc, kmp_int32 gtid, void **user_lock) {
 2552 void __kmpc_set_lock(ident_t *loc, kmp_int32 gtid, void **user_lock) {
 2644 void __kmpc_set_nest_lock(ident_t *loc, kmp_int32 gtid, void **user_lock) {
 2755 void __kmpc_unset_lock(ident_t *loc, kmp_int32 gtid, void **user_lock) {
 2851 void __kmpc_unset_nest_lock(ident_t *loc, kmp_int32 gtid, void **user_lock) {
 2977 int __kmpc_test_lock(ident_t *loc, kmp_int32 gtid, void **user_lock) {
 3085 int __kmpc_test_nest_lock(ident_t *loc, kmp_int32 gtid, void **user_lock) {
 3220 __kmp_enter_critical_section_reduce_block(ident_t *loc, kmp_int32 global_tid,
 3284 __kmp_end_critical_section_reduce_block(ident_t *loc, kmp_int32 global_tid,
 3375 kmp_int32
 3376 __kmpc_reduce_nowait(ident_t *loc, kmp_int32 global_tid, kmp_int32 num_vars,
 3376 __kmpc_reduce_nowait(ident_t *loc, kmp_int32 global_tid, kmp_int32 num_vars,
 3530 void __kmpc_end_reduce_nowait(ident_t *loc, kmp_int32 global_tid,
 3592 kmp_int32 __kmpc_reduce(ident_t *loc, kmp_int32 global_tid, kmp_int32 num_vars,
 3592 kmp_int32 __kmpc_reduce(ident_t *loc, kmp_int32 global_tid, kmp_int32 num_vars,
 3592 kmp_int32 __kmpc_reduce(ident_t *loc, kmp_int32 global_tid, kmp_int32 num_vars,
 3709 void __kmpc_end_reduce(ident_t *loc, kmp_int32 global_tid,
 3828   kmp_int32 gtid;
 3842   kmp_int32 gtid;
 3988   kmp_int32 shft, num_dims, i;
 4036     kmp_int32 j = i * 4;
 4081   kmp_int32 shft, num_dims, i;
 4111     kmp_int32 j = i * 4;
 4135   kmp_int32 num_done;
projects/openmp/runtime/src/kmp_dispatch.cpp
 1142                                   kmp_int32 *p_last, T *p_lb, T *p_ub,
 1149   kmp_int32 last = 0;
 1315           kmp_int32 remaining;
 1882 static int __kmp_dispatch_next(ident_t *loc, int gtid, kmp_int32 *p_last,
 1929       kmp_int32 last;
 2024     kmp_int32 last = 0;
 2144 static void __kmp_dist_get_bounds(ident_t *loc, kmp_int32 gtid,
 2145                                   kmp_int32 *plastiter, T *plower, T *pupper,
 2280 void __kmpc_dispatch_init_4(ident_t *loc, kmp_int32 gtid,
 2281                             enum sched_type schedule, kmp_int32 lb,
 2282                             kmp_int32 ub, kmp_int32 st, kmp_int32 chunk) {
 2282                             kmp_int32 ub, kmp_int32 st, kmp_int32 chunk) {
 2282                             kmp_int32 ub, kmp_int32 st, kmp_int32 chunk) {
 2287   __kmp_dispatch_init<kmp_int32>(loc, gtid, schedule, lb, ub, st, chunk, true);
 2292 void __kmpc_dispatch_init_4u(ident_t *loc, kmp_int32 gtid,
 2294                              kmp_uint32 ub, kmp_int32 st, kmp_int32 chunk) {
 2294                              kmp_uint32 ub, kmp_int32 st, kmp_int32 chunk) {
 2305 void __kmpc_dispatch_init_8(ident_t *loc, kmp_int32 gtid,
 2318 void __kmpc_dispatch_init_8u(ident_t *loc, kmp_int32 gtid,
 2337 void __kmpc_dist_dispatch_init_4(ident_t *loc, kmp_int32 gtid,
 2338                                  enum sched_type schedule, kmp_int32 *p_last,
 2339                                  kmp_int32 lb, kmp_int32 ub, kmp_int32 st,
 2339                                  kmp_int32 lb, kmp_int32 ub, kmp_int32 st,
 2339                                  kmp_int32 lb, kmp_int32 ub, kmp_int32 st,
 2340                                  kmp_int32 chunk) {
 2345   __kmp_dist_get_bounds<kmp_int32>(loc, gtid, p_last, &lb, &ub, st);
 2346   __kmp_dispatch_init<kmp_int32>(loc, gtid, schedule, lb, ub, st, chunk, true);
 2349 void __kmpc_dist_dispatch_init_4u(ident_t *loc, kmp_int32 gtid,
 2350                                   enum sched_type schedule, kmp_int32 *p_last,
 2351                                   kmp_uint32 lb, kmp_uint32 ub, kmp_int32 st,
 2352                                   kmp_int32 chunk) {
 2361 void __kmpc_dist_dispatch_init_8(ident_t *loc, kmp_int32 gtid,
 2362                                  enum sched_type schedule, kmp_int32 *p_last,
 2373 void __kmpc_dist_dispatch_init_8u(ident_t *loc, kmp_int32 gtid,
 2374                                   enum sched_type schedule, kmp_int32 *p_last,
 2398 int __kmpc_dispatch_next_4(ident_t *loc, kmp_int32 gtid, kmp_int32 *p_last,
 2398 int __kmpc_dispatch_next_4(ident_t *loc, kmp_int32 gtid, kmp_int32 *p_last,
 2399                            kmp_int32 *p_lb, kmp_int32 *p_ub, kmp_int32 *p_st) {
 2399                            kmp_int32 *p_lb, kmp_int32 *p_ub, kmp_int32 *p_st) {
 2399                            kmp_int32 *p_lb, kmp_int32 *p_ub, kmp_int32 *p_st) {
 2403   return __kmp_dispatch_next<kmp_int32>(loc, gtid, p_last, p_lb, p_ub, p_st
 2414 int __kmpc_dispatch_next_4u(ident_t *loc, kmp_int32 gtid, kmp_int32 *p_last,
 2414 int __kmpc_dispatch_next_4u(ident_t *loc, kmp_int32 gtid, kmp_int32 *p_last,
 2416                             kmp_int32 *p_st) {
 2431 int __kmpc_dispatch_next_8(ident_t *loc, kmp_int32 gtid, kmp_int32 *p_last,
 2431 int __kmpc_dispatch_next_8(ident_t *loc, kmp_int32 gtid, kmp_int32 *p_last,
 2447 int __kmpc_dispatch_next_8u(ident_t *loc, kmp_int32 gtid, kmp_int32 *p_last,
 2447 int __kmpc_dispatch_next_8u(ident_t *loc, kmp_int32 gtid, kmp_int32 *p_last,
 2467 void __kmpc_dispatch_fini_4(ident_t *loc, kmp_int32 gtid) {
 2474 void __kmpc_dispatch_fini_8(ident_t *loc, kmp_int32 gtid) {
 2481 void __kmpc_dispatch_fini_4u(ident_t *loc, kmp_int32 gtid) {
 2488 void __kmpc_dispatch_fini_8u(ident_t *loc, kmp_int32 gtid) {
 2569 void __kmp_aux_dispatch_init_4(ident_t *loc, kmp_int32 gtid,
 2570                                enum sched_type schedule, kmp_int32 lb,
 2571                                kmp_int32 ub, kmp_int32 st, kmp_int32 chunk,
 2571                                kmp_int32 ub, kmp_int32 st, kmp_int32 chunk,
 2571                                kmp_int32 ub, kmp_int32 st, kmp_int32 chunk,
 2573   __kmp_dispatch_init<kmp_int32>(loc, gtid, schedule, lb, ub, st, chunk,
 2577 void __kmp_aux_dispatch_init_4u(ident_t *loc, kmp_int32 gtid,
 2579                                 kmp_uint32 ub, kmp_int32 st, kmp_int32 chunk,
 2579                                 kmp_uint32 ub, kmp_int32 st, kmp_int32 chunk,
 2585 void __kmp_aux_dispatch_init_8(ident_t *loc, kmp_int32 gtid,
 2593 void __kmp_aux_dispatch_init_8u(ident_t *loc, kmp_int32 gtid,
 2601 void __kmp_aux_dispatch_fini_chunk_4(ident_t *loc, kmp_int32 gtid) {
 2605 void __kmp_aux_dispatch_fini_chunk_8(ident_t *loc, kmp_int32 gtid) {
 2609 void __kmp_aux_dispatch_fini_chunk_4u(ident_t *loc, kmp_int32 gtid) {
 2613 void __kmp_aux_dispatch_fini_chunk_8u(ident_t *loc, kmp_int32 gtid) {
projects/openmp/runtime/src/kmp_dispatch.h
   58     dispatch_shared_info_template<T> volatile *sh, kmp_int32 *p_last, T *p_lb,
  139   kmp_int32 ordered_dummy[KMP_MAX_ORDERED - 3];
  174   volatile kmp_int32 doacross_buf_idx; // teamwise index
  176   kmp_int32 doacross_num_done; // count finished threads
  197 __forceinline kmp_int32 test_then_add<kmp_int32>(volatile kmp_int32 *p,
  197 __forceinline kmp_int32 test_then_add<kmp_int32>(volatile kmp_int32 *p,
  197 __forceinline kmp_int32 test_then_add<kmp_int32>(volatile kmp_int32 *p,
  198                                                  kmp_int32 d) {
  199   kmp_int32 r;
  216 __forceinline kmp_int32 test_then_inc_acq<kmp_int32>(volatile kmp_int32 *p) {
  216 __forceinline kmp_int32 test_then_inc_acq<kmp_int32>(volatile kmp_int32 *p) {
  216 __forceinline kmp_int32 test_then_inc_acq<kmp_int32>(volatile kmp_int32 *p) {
  217   kmp_int32 r;
  233 __forceinline kmp_int32 test_then_inc<kmp_int32>(volatile kmp_int32 *p) {
  233 __forceinline kmp_int32 test_then_inc<kmp_int32>(volatile kmp_int32 *p) {
  233 __forceinline kmp_int32 test_then_inc<kmp_int32>(volatile kmp_int32 *p) {
  234   kmp_int32 r;
  248 static __forceinline kmp_int32 compare_and_swap(volatile T *p, T c, T s);
  251 __forceinline kmp_int32 compare_and_swap<kmp_int32>(volatile kmp_int32 *p,
  251 __forceinline kmp_int32 compare_and_swap<kmp_int32>(volatile kmp_int32 *p,
  251 __forceinline kmp_int32 compare_and_swap<kmp_int32>(volatile kmp_int32 *p,
  252                                                     kmp_int32 c, kmp_int32 s) {
  252                                                     kmp_int32 c, kmp_int32 s) {
  257 __forceinline kmp_int32 compare_and_swap<kmp_int64>(volatile kmp_int64 *p,
projects/openmp/runtime/src/kmp_global.cpp
   62 std::atomic<kmp_int32> __kmp_team_counter = ATOMIC_VAR_INIT(0);
   63 std::atomic<kmp_int32> __kmp_task_counter = ATOMIC_VAR_INIT(0);
  284 kmp_int32 __kmp_default_device = 0;
  287 kmp_int32 __kmp_max_task_priority = 0;
  404 kmp_int32 __kmp_use_yield = 1;
  406 kmp_int32 __kmp_use_yield_exp_set = 0;
projects/openmp/runtime/src/kmp_gsupport.cpp
  126   kmp_int32 rc = __kmp_enter_single(gtid, &loc, FALSE);
  412 static void __kmp_GOMP_serialized_parallel(ident_t *loc, kmp_int32 gtid,
 1129   kmp_int32 flags = 0;
 1547 static kmp_int32 __kmp_gomp_to_omp_cancellation_kind(int gomp_kind) {
 1548   kmp_int32 cncl_kind = 0;
 1571   kmp_int32 cncl_kind = __kmp_gomp_to_omp_cancellation_kind(which);
 1581   kmp_int32 cncl_kind = __kmp_gomp_to_omp_cancellation_kind(which);
 1642                                 kmp_int32 last_private) {
 1659   typedef void (*p_task_dup_t)(kmp_task_t *, kmp_task_t *, kmp_int32);
 1664   kmp_int32 flags = 0;
projects/openmp/runtime/src/kmp_itt.cpp
   27 kmp_int32 __kmp_barrier_domain_count;
   28 kmp_int32 __kmp_region_domain_count;
   32 kmp_int32 __kmp_itt_region_team_size[KMP_MAX_FRAME_DOMAINS];
   52 kmp_int32 __kmp_itt_prepare_delay = 0;
projects/openmp/runtime/src/kmp_itt.h
   30 extern kmp_int32 __kmp_itt_prepare_delay;
  274 extern kmp_int32 __kmp_barrier_domain_count;
  275 extern kmp_int32 __kmp_region_domain_count;
  279 extern kmp_int32 __kmp_itt_region_team_size[KMP_MAX_FRAME_DOMAINS];
projects/openmp/runtime/src/kmp_itt.inl
  527         kmp_int32 expl = 0;
projects/openmp/runtime/src/kmp_lock.cpp
   72 static kmp_int32 __kmp_get_tas_lock_owner(kmp_tas_lock_t *lck) {
   81 __kmp_acquire_tas_lock_timed_template(kmp_tas_lock_t *lck, kmp_int32 gtid) {
   91   kmp_int32 tas_free = KMP_LOCK_FREE(tas);
   92   kmp_int32 tas_busy = KMP_LOCK_BUSY(gtid + 1, tas);
  113 int __kmp_acquire_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) {
  120                                               kmp_int32 gtid) {
  132 int __kmp_test_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) {
  133   kmp_int32 tas_free = KMP_LOCK_FREE(tas);
  134   kmp_int32 tas_busy = KMP_LOCK_BUSY(gtid + 1, tas);
  144                                            kmp_int32 gtid) {
  153 int __kmp_release_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) {
  166                                               kmp_int32 gtid) {
  203 int __kmp_acquire_nested_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) {
  218                                                      kmp_int32 gtid) {
  226 int __kmp_test_nested_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) {
  243                                                   kmp_int32 gtid) {
  251 int __kmp_release_nested_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) {
  263                                                      kmp_int32 gtid) {
  309 static kmp_int32 __kmp_get_futex_lock_owner(kmp_futex_lock_t *lck) {
  318 __kmp_acquire_futex_lock_timed_template(kmp_futex_lock_t *lck, kmp_int32 gtid) {
  319   kmp_int32 gtid_code = (gtid + 1) << 1;
  334   kmp_int32 poll_val;
  340     kmp_int32 cond = KMP_LOCK_STRIP(poll_val) & 1;
  375     kmp_int32 rc;
  399 int __kmp_acquire_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) {
  406                                                 kmp_int32 gtid) {
  418 int __kmp_test_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) {
  428                                              kmp_int32 gtid) {
  437 int __kmp_release_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) {
  446   kmp_int32 poll_val = KMP_XCHG_FIXED32(&(lck->lk.poll), KMP_LOCK_FREE(futex));
  470                                                 kmp_int32 gtid) {
  507 int __kmp_acquire_nested_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) {
  522                                                        kmp_int32 gtid) {
  530 int __kmp_test_nested_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) {
  547                                                     kmp_int32 gtid) {
  555 int __kmp_release_nested_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) {
  567                                                        kmp_int32 gtid) {
  608 static kmp_int32 __kmp_get_ticket_lock_owner(kmp_ticket_lock_t *lck) {
  626                                          kmp_int32 gtid) {
  645 int __kmp_acquire_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) {
  652                                                  kmp_int32 gtid) {
  676 int __kmp_test_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) {
  693                                               kmp_int32 gtid) {
  716 int __kmp_release_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) {
  732                                                  kmp_int32 gtid) {
  808 int __kmp_acquire_nested_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) {
  827                                                         kmp_int32 gtid) {
  843 int __kmp_test_nested_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) {
  865                                                      kmp_int32 gtid) {
  881 int __kmp_release_nested_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) {
  895                                                         kmp_int32 gtid) {
 1075 static kmp_int32 __kmp_get_queuing_lock_owner(kmp_queuing_lock_t *lck) {
 1089                                           kmp_int32 gtid) {
 1091   volatile kmp_int32 *head_id_p = &lck->lk.head_id;
 1092   volatile kmp_int32 *tail_id_p = &lck->lk.tail_id;
 1094   kmp_int32 need_mf = 1;
 1127     kmp_int32 enqueued;
 1128     kmp_int32 head;
 1129     kmp_int32 tail;
 1180       kmp_int32 grabbed_lock;
 1281 int __kmp_acquire_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) {
 1290                                                   kmp_int32 gtid) {
 1308 int __kmp_test_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) {
 1309   volatile kmp_int32 *head_id_p = &lck->lk.head_id;
 1310   kmp_int32 head;
 1342                                                kmp_int32 gtid) {
 1359 int __kmp_release_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) {
 1361   volatile kmp_int32 *head_id_p = &lck->lk.head_id;
 1362   volatile kmp_int32 *tail_id_p = &lck->lk.tail_id;
 1384     kmp_int32 dequeued;
 1385     kmp_int32 head;
 1386     kmp_int32 tail;
 1435         volatile kmp_int32 *waiting_id_p;
 1503                                                   kmp_int32 gtid) {
 1562 int __kmp_acquire_nested_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) {
 1581                                               kmp_int32 gtid) {
 1592 int __kmp_test_nested_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) {
 1611                                                       kmp_int32 gtid) {
 1622 int __kmp_release_nested_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) {
 1637                                               kmp_int32 gtid) {
 2003                                            kmp_int32 gtid) {
 2013                                          kmp_int32 gtid) {
 2060 static int __kmp_test_adaptive_lock(kmp_adaptive_lock_t *lck, kmp_int32 gtid) {
 2080                                                 kmp_int32 gtid) {
 2106                                         kmp_int32 gtid) {
 2139                                                     kmp_int32 gtid) {
 2154                                        kmp_int32 gtid) {
 2169                                                    kmp_int32 gtid) {
 2224 static kmp_int32 __kmp_get_drdpa_lock_owner(kmp_drdpa_lock_t *lck) {
 2233 __kmp_acquire_drdpa_lock_timed_template(kmp_drdpa_lock_t *lck, kmp_int32 gtid) {
 2365 int __kmp_acquire_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) {
 2372                                                 kmp_int32 gtid) {
 2390 int __kmp_test_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) {
 2418                                              kmp_int32 gtid) {
 2435 int __kmp_release_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) {
 2450                                                 kmp_int32 gtid) {
 2523 int __kmp_acquire_nested_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) {
 2541                                                         kmp_int32 gtid) {
 2552 int __kmp_test_nested_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) {
 2571                                                     kmp_int32 gtid) {
 2582 int __kmp_release_nested_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) {
 2596                                                        kmp_int32 gtid) {
 2723 static void __kmp_acquire_hle_lock(kmp_dyna_lock_t *lck, kmp_int32 gtid) {
 2738                                                kmp_int32 gtid) {
 2742 static int __kmp_release_hle_lock(kmp_dyna_lock_t *lck, kmp_int32 gtid) {
 2751                                               kmp_int32 gtid) {
 2755 static int __kmp_test_hle_lock(kmp_dyna_lock_t *lck, kmp_int32 gtid) {
 2760                                            kmp_int32 gtid) {
 2776 static void __kmp_acquire_rtm_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) {
 2799                                                kmp_int32 gtid) {
 2803 static int __kmp_release_rtm_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) {
 2815                                               kmp_int32 gtid) {
 2819 static int __kmp_test_rtm_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) {
 2834                                            kmp_int32 gtid) {
 2844 static int __kmp_set_indirect_lock(kmp_dyna_lock_t *lock, kmp_int32);
 2845 static int __kmp_unset_indirect_lock(kmp_dyna_lock_t *lock, kmp_int32);
 2846 static int __kmp_test_indirect_lock(kmp_dyna_lock_t *lock, kmp_int32);
 2848                                                kmp_int32);
 2850                                                  kmp_int32);
 2852                                                 kmp_int32);
 2918 static int (*direct_set[])(kmp_dyna_lock_t *, kmp_int32) = {
 2923 static int (*direct_set_check[])(kmp_dyna_lock_t *, kmp_int32) = {
 2931 static int (*direct_unset[])(kmp_dyna_lock_t *, kmp_int32) = {
 2933 static int (*direct_test[])(kmp_dyna_lock_t *, kmp_int32) = {
 2938 static int (*direct_unset_check[])(kmp_dyna_lock_t *, kmp_int32) = {
 2941 static int (*direct_test_check[])(kmp_dyna_lock_t *, kmp_int32) = {
 2947 int (**__kmp_direct_set)(kmp_dyna_lock_t *, kmp_int32) = 0;
 2948 int (**__kmp_direct_unset)(kmp_dyna_lock_t *, kmp_int32) = 0;
 2949 int (**__kmp_direct_test)(kmp_dyna_lock_t *, kmp_int32) = 0;
 2971                              kmp_int32) = {KMP_FOREACH_I_LOCK(expand, acquire)};
 2975 static int (*indirect_set_check[])(kmp_user_lock_p, kmp_int32) = {
 2982 static int (*indirect_unset[])(kmp_user_lock_p, kmp_int32) = {
 2985                               kmp_int32) = {KMP_FOREACH_I_LOCK(expand, test)};
 2989 static int (*indirect_unset_check[])(kmp_user_lock_p, kmp_int32) = {
 2991 static int (*indirect_test_check[])(kmp_user_lock_p, kmp_int32) = {
 2997 int (**__kmp_indirect_set)(kmp_user_lock_p, kmp_int32) = 0;
 2998 int (**__kmp_indirect_unset)(kmp_user_lock_p, kmp_int32) = 0;
 2999 int (**__kmp_indirect_test)(kmp_user_lock_p, kmp_int32) = 0;
 3026                                                   kmp_int32 gtid,
 3153 static int __kmp_set_indirect_lock(kmp_dyna_lock_t *lock, kmp_int32 gtid) {
 3158 static int __kmp_unset_indirect_lock(kmp_dyna_lock_t *lock, kmp_int32 gtid) {
 3163 static int __kmp_test_indirect_lock(kmp_dyna_lock_t *lock, kmp_int32 gtid) {
 3169                                                kmp_int32 gtid) {
 3176                                                  kmp_int32 gtid) {
 3183                                                 kmp_int32 gtid) {
 3192 kmp_int32 __kmp_get_user_lock_owner(kmp_user_lock_p lck, kmp_uint32 seq) {
projects/openmp/runtime/src/kmp_lock.h
  123   std::atomic<kmp_int32> poll;
  124   kmp_int32 depth_locked; // depth locked, for nested locks only
  144 extern int __kmp_acquire_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid);
  145 extern int __kmp_test_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid);
  146 extern int __kmp_release_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid);
  150 extern int __kmp_acquire_nested_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid);
  151 extern int __kmp_test_nested_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid);
  152 extern int __kmp_release_nested_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid);
  179   volatile kmp_int32 poll; // KMP_LOCK_FREE(futex) => unlocked
  182   kmp_int32 depth_locked; // depth locked, for nested locks only
  203 extern int __kmp_acquire_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid);
  204 extern int __kmp_test_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid);
  205 extern int __kmp_release_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid);
  210                                            kmp_int32 gtid);
  211 extern int __kmp_test_nested_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid);
  213                                            kmp_int32 gtid);
  286 extern int __kmp_acquire_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid);
  287 extern int __kmp_test_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid);
  289                                              kmp_int32 gtid);
  290 extern int __kmp_release_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid);
  295                                             kmp_int32 gtid);
  297                                          kmp_int32 gtid);
  299                                             kmp_int32 gtid);
  363   volatile kmp_int32
  366   volatile kmp_int32
  374   volatile kmp_int32 owner_id; // (gtid+1) of owning thread, 0 if unlocked
  375   kmp_int32 depth_locked; // depth locked, for nested locks only
  394 extern int __kmp_acquire_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid);
  395 extern int __kmp_test_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid);
  396 extern int __kmp_release_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid);
  401                                              kmp_int32 gtid);
  403                                           kmp_int32 gtid);
  405                                              kmp_int32 gtid);
  475   kmp_int32 depth_locked; // depth locked
  491 extern int __kmp_acquire_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid);
  492 extern int __kmp_test_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid);
  493 extern int __kmp_release_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid);
  498                                            kmp_int32 gtid);
  499 extern int __kmp_test_nested_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid);
  501                                            kmp_int32 gtid);
  555 static inline int __kmp_acquire_lock(kmp_lock_t *lck, kmp_int32 gtid) {
  559 static inline int __kmp_test_lock(kmp_lock_t *lck, kmp_int32 gtid) {
  563 static inline void __kmp_release_lock(kmp_lock_t *lck, kmp_int32 gtid) {
 1126 extern int (**__kmp_direct_set)(kmp_dyna_lock_t *, kmp_int32);
 1127 extern int (**__kmp_direct_unset)(kmp_dyna_lock_t *, kmp_int32);
 1128 extern int (**__kmp_direct_test)(kmp_dyna_lock_t *, kmp_int32);
 1134 extern int (**__kmp_indirect_set)(kmp_user_lock_p, kmp_int32);
 1135 extern int (**__kmp_indirect_unset)(kmp_user_lock_p, kmp_int32);
 1136 extern int (**__kmp_indirect_test)(kmp_user_lock_p, kmp_int32);
 1178 __kmp_allocate_indirect_lock(void **, kmp_int32, kmp_indirect_locktag_t);
 1248 extern kmp_int32 __kmp_get_user_lock_owner(kmp_user_lock_p, kmp_uint32);
projects/openmp/runtime/src/kmp_os.h
  684   kmp_int32 tmp =
projects/openmp/runtime/src/kmp_runtime.cpp
  726     kmp_int32 old_this = th->th.th_local.this_construct;
 1132 void __kmp_serialized_parallel(ident_t *loc, kmp_int32 global_tid) {
 1391                     kmp_int32 argc, microtask_t microtask, launch_t invoker,
 8077     ident_t *loc, kmp_int32 global_tid, kmp_int32 num_vars, size_t reduce_size,
 8077     ident_t *loc, kmp_int32 global_tid, kmp_int32 num_vars, size_t reduce_size,
 8228 kmp_int32 __kmp_get_reduce_method(void) {
projects/openmp/runtime/src/kmp_sched.cpp
   65 static void __kmp_for_static_init(ident_t *loc, kmp_int32 global_tid,
   66                                   kmp_int32 schedtype, kmp_int32 *plastiter,
   66                                   kmp_int32 schedtype, kmp_int32 *plastiter,
   83   kmp_int32 gtid = global_tid;
  420 static void __kmp_dist_for_static_init(ident_t *loc, kmp_int32 gtid,
  421                                        kmp_int32 schedule, kmp_int32 *plastiter,
  421                                        kmp_int32 schedule, kmp_int32 *plastiter,
  661 static void __kmp_team_static_init(ident_t *loc, kmp_int32 gtid,
  662                                    kmp_int32 *p_last, T *p_lb, T *p_ub,
  794 void __kmpc_for_static_init_4(ident_t *loc, kmp_int32 gtid, kmp_int32 schedtype,
  794 void __kmpc_for_static_init_4(ident_t *loc, kmp_int32 gtid, kmp_int32 schedtype,
  795                               kmp_int32 *plastiter, kmp_int32 *plower,
  795                               kmp_int32 *plastiter, kmp_int32 *plower,
  796                               kmp_int32 *pupper, kmp_int32 *pstride,
  796                               kmp_int32 *pupper, kmp_int32 *pstride,
  797                               kmp_int32 incr, kmp_int32 chunk) {
  797                               kmp_int32 incr, kmp_int32 chunk) {
  798   __kmp_for_static_init<kmp_int32>(loc, gtid, schedtype, plastiter, plower,
  810 void __kmpc_for_static_init_4u(ident_t *loc, kmp_int32 gtid,
  811                                kmp_int32 schedtype, kmp_int32 *plastiter,
  811                                kmp_int32 schedtype, kmp_int32 *plastiter,
  813                                kmp_int32 *pstride, kmp_int32 incr,
  813                                kmp_int32 *pstride, kmp_int32 incr,
  814                                kmp_int32 chunk) {
  827 void __kmpc_for_static_init_8(ident_t *loc, kmp_int32 gtid, kmp_int32 schedtype,
  827 void __kmpc_for_static_init_8(ident_t *loc, kmp_int32 gtid, kmp_int32 schedtype,
  828                               kmp_int32 *plastiter, kmp_int64 *plower,
  843 void __kmpc_for_static_init_8u(ident_t *loc, kmp_int32 gtid,
  844                                kmp_int32 schedtype, kmp_int32 *plastiter,
  844                                kmp_int32 schedtype, kmp_int32 *plastiter,
  882 void __kmpc_dist_for_static_init_4(ident_t *loc, kmp_int32 gtid,
  883                                    kmp_int32 schedule, kmp_int32 *plastiter,
  883                                    kmp_int32 schedule, kmp_int32 *plastiter,
  884                                    kmp_int32 *plower, kmp_int32 *pupper,
  884                                    kmp_int32 *plower, kmp_int32 *pupper,
  885                                    kmp_int32 *pupperD, kmp_int32 *pstride,
  885                                    kmp_int32 *pupperD, kmp_int32 *pstride,
  886                                    kmp_int32 incr, kmp_int32 chunk) {
  886                                    kmp_int32 incr, kmp_int32 chunk) {
  887   __kmp_dist_for_static_init<kmp_int32>(loc, gtid, schedule, plastiter, plower,
  894 void __kmpc_dist_for_static_init_4u(ident_t *loc, kmp_int32 gtid,
  895                                     kmp_int32 schedule, kmp_int32 *plastiter,
  895                                     kmp_int32 schedule, kmp_int32 *plastiter,
  897                                     kmp_uint32 *pupperD, kmp_int32 *pstride,
  898                                     kmp_int32 incr, kmp_int32 chunk) {
  898                                     kmp_int32 incr, kmp_int32 chunk) {
  906 void __kmpc_dist_for_static_init_8(ident_t *loc, kmp_int32 gtid,
  907                                    kmp_int32 schedule, kmp_int32 *plastiter,
  907                                    kmp_int32 schedule, kmp_int32 *plastiter,
  918 void __kmpc_dist_for_static_init_8u(ident_t *loc, kmp_int32 gtid,
  919                                     kmp_int32 schedule, kmp_int32 *plastiter,
  919                                     kmp_int32 schedule, kmp_int32 *plastiter,
  956 void __kmpc_team_static_init_4(ident_t *loc, kmp_int32 gtid, kmp_int32 *p_last,
  956 void __kmpc_team_static_init_4(ident_t *loc, kmp_int32 gtid, kmp_int32 *p_last,
  957                                kmp_int32 *p_lb, kmp_int32 *p_ub,
  957                                kmp_int32 *p_lb, kmp_int32 *p_ub,
  958                                kmp_int32 *p_st, kmp_int32 incr,
  958                                kmp_int32 *p_st, kmp_int32 incr,
  959                                kmp_int32 chunk) {
  961   __kmp_team_static_init<kmp_int32>(loc, gtid, p_last, p_lb, p_ub, p_st, incr,
  968 void __kmpc_team_static_init_4u(ident_t *loc, kmp_int32 gtid, kmp_int32 *p_last,
  968 void __kmpc_team_static_init_4u(ident_t *loc, kmp_int32 gtid, kmp_int32 *p_last,
  970                                 kmp_int32 *p_st, kmp_int32 incr,
  970                                 kmp_int32 *p_st, kmp_int32 incr,
  971                                 kmp_int32 chunk) {
  980 void __kmpc_team_static_init_8(ident_t *loc, kmp_int32 gtid, kmp_int32 *p_last,
  980 void __kmpc_team_static_init_8(ident_t *loc, kmp_int32 gtid, kmp_int32 *p_last,
  992 void __kmpc_team_static_init_8u(ident_t *loc, kmp_int32 gtid, kmp_int32 *p_last,
  992 void __kmpc_team_static_init_8u(ident_t *loc, kmp_int32 gtid, kmp_int32 *p_last,
projects/openmp/runtime/src/kmp_taskdeps.cpp
   60 static inline kmp_int32 __kmp_dephash_hash(kmp_intptr_t addr, size_t hsize) {
   75   kmp_int32 size_to_allocate =
   96       kmp_int32 new_bucket = __kmp_dephash_hash(entry->addr, h->size);
  126   kmp_int32 size =
  158   kmp_int32 bucket = __kmp_dephash_hash(addr, h->size);
  236 static inline kmp_int32
  237 __kmp_depnode_link_successor(kmp_int32 gtid, kmp_info_t *thread,
  242   kmp_int32 npredecessors = 0;
  263 static inline kmp_int32 __kmp_depnode_link_successor(kmp_int32 gtid,
  263 static inline kmp_int32 __kmp_depnode_link_successor(kmp_int32 gtid,
  270   kmp_int32 npredecessors = 0;
  289 static inline kmp_int32
  290 __kmp_process_deps(kmp_int32 gtid, kmp_depnode_t *node, kmp_dephash_t **hash,
  291                    bool dep_barrier, kmp_int32 ndeps,
  298   kmp_int32 npredecessors = 0;
  299   for (kmp_int32 i = 0; i < ndeps; i++) {
  381       kmp_int32 m;
  409 static bool __kmp_check_deps(kmp_int32 gtid, kmp_depnode_t *node,
  411                              bool dep_barrier, kmp_int32 ndeps,
  413                              kmp_int32 ndeps_noalias,
  505 kmp_int32 __kmpc_omp_task_with_deps(ident_t *loc_ref, kmp_int32 gtid,
  505 kmp_int32 __kmpc_omp_task_with_deps(ident_t *loc_ref, kmp_int32 gtid,
  506                                     kmp_task_t *new_task, kmp_int32 ndeps,
  508                                     kmp_int32 ndeps_noalias,
  541     kmp_int32 i;
  636   kmp_int32 ret = __kmp_omp_task(gtid, new_task, true);
  656 void __kmpc_omp_wait_deps(ident_t *loc_ref, kmp_int32 gtid, kmp_int32 ndeps,
  656 void __kmpc_omp_wait_deps(ident_t *loc_ref, kmp_int32 gtid, kmp_int32 ndeps,
  657                           kmp_depend_info_t *dep_list, kmp_int32 ndeps_noalias,
projects/openmp/runtime/src/kmp_taskdeps.h
   27   kmp_int32 n = KMP_ATOMIC_DEC(&node->dn.nrefs) - 1;
   88 static inline void __kmp_release_deps(kmp_int32 gtid, kmp_taskdata_t *task) {
  114     kmp_int32 npredecessors = KMP_ATOMIC_DEC(&successor->dn.npredecessors) - 1;
projects/openmp/runtime/src/kmp_tasking.cpp
   33 static void __kmp_bottom_half_finish_proxy(kmp_int32 gtid, kmp_task_t *ptask);
  253 static bool __kmp_task_is_allowed(int gtid, const kmp_int32 is_constrained,
  265       kmp_int32 level = current->td_level;
  300   kmp_int32 size = TASK_DEQUE_SIZE(thread_data->td);
  301   kmp_int32 new_size = 2 * size;
  324 static kmp_int32 __kmp_push_task(kmp_int32 gtid, kmp_task_t *task) {
  324 static kmp_int32 __kmp_push_task(kmp_int32 gtid, kmp_task_t *task) {
  328   kmp_int32 tid = __kmp_tid_from_gtid(gtid);
  337     kmp_int32 counter = 1 + KMP_ATOMIC_INC(&taskdata->td_untied_count);
  492 static void __kmp_task_start(kmp_int32 gtid, kmp_task_t *task,
  558                                      kmp_int32 gtid) {
  599 static void __kmpc_omp_task_begin_if0_template(ident_t *loc_ref, kmp_int32 gtid,
  613     kmp_int32 counter = 1 + KMP_ATOMIC_INC(&taskdata->td_untied_count);
  650 static void __kmpc_omp_task_begin_if0_ompt(ident_t *loc_ref, kmp_int32 gtid,
  665 void __kmpc_omp_task_begin_if0(ident_t *loc_ref, kmp_int32 gtid,
  703 static void __kmp_free_task(kmp_int32 gtid, kmp_taskdata_t *taskdata,
  735 static void __kmp_free_task_and_ancestors(kmp_int32 gtid,
  740   kmp_int32 team_serial =
  745   kmp_int32 children = KMP_ATOMIC_DEC(&taskdata->td_allocated_child_tasks) - 1;
  803 static void __kmp_task_finish(kmp_int32 gtid, kmp_task_t *task,
  809   kmp_int32 children = 0;
  827     kmp_int32 counter = KMP_ATOMIC_DEC(&taskdata->td_untied_count) - 1;
  960                                                   kmp_int32 gtid,
  984 void __kmpc_omp_task_complete_if0_ompt(ident_t *loc_ref, kmp_int32 gtid,
  995 void __kmpc_omp_task_complete_if0(ident_t *loc_ref, kmp_int32 gtid,
 1154 kmp_task_t *__kmp_task_alloc(ident_t *loc_ref, kmp_int32 gtid,
 1215       kmp_int32 tid = thread->th.th_info.ds.ds_tid;
 1355 kmp_task_t *__kmpc_omp_task_alloc(ident_t *loc_ref, kmp_int32 gtid,
 1356                                   kmp_int32 flags, size_t sizeof_kmp_task_t,
 1380 kmp_task_t *__kmpc_omp_target_task_alloc(ident_t *loc_ref, kmp_int32 gtid,
 1381                                          kmp_int32 flags,
 1403 kmp_int32
 1404 __kmpc_omp_reg_task_with_affinity(ident_t *loc_ref, kmp_int32 gtid,
 1405                                   kmp_task_t *new_task, kmp_int32 naffins,
 1415 static void __kmp_invoke_task(kmp_int32 gtid, kmp_task_t *task,
 1530     kmp_int32 kmp_itt_count_task =
 1595 kmp_int32 __kmpc_omp_task_parts(ident_t *loc_ref, kmp_int32 gtid,
 1595 kmp_int32 __kmpc_omp_task_parts(ident_t *loc_ref, kmp_int32 gtid,
 1653 kmp_int32 __kmp_omp_task(kmp_int32 gtid, kmp_task_t *new_task,
 1653 kmp_int32 __kmp_omp_task(kmp_int32 gtid, kmp_task_t *new_task,
 1684 kmp_int32 __kmpc_omp_task(ident_t *loc_ref, kmp_int32 gtid,
 1684 kmp_int32 __kmpc_omp_task(ident_t *loc_ref, kmp_int32 gtid,
 1686   kmp_int32 res;
 1750 kmp_int32 __kmp_omp_taskloop_task(ident_t *loc_ref, kmp_int32 gtid,
 1750 kmp_int32 __kmp_omp_taskloop_task(ident_t *loc_ref, kmp_int32 gtid,
 1752   kmp_int32 res;
 1793 static kmp_int32 __kmpc_omp_taskwait_template(ident_t *loc_ref, kmp_int32 gtid,
 1793 static kmp_int32 __kmpc_omp_taskwait_template(ident_t *loc_ref, kmp_int32 gtid,
 1898 static kmp_int32 __kmpc_omp_taskwait_ompt(ident_t *loc_ref, kmp_int32 gtid,
 1898 static kmp_int32 __kmpc_omp_taskwait_ompt(ident_t *loc_ref, kmp_int32 gtid,
 1908 kmp_int32 __kmpc_omp_taskwait(ident_t *loc_ref, kmp_int32 gtid) {
 1908 kmp_int32 __kmpc_omp_taskwait(ident_t *loc_ref, kmp_int32 gtid) {
 1920 kmp_int32 __kmpc_omp_taskyield(ident_t *loc_ref, kmp_int32 gtid, int end_part) {
 1920 kmp_int32 __kmpc_omp_taskyield(ident_t *loc_ref, kmp_int32 gtid, int end_part) {
 2087   kmp_int32 nth = thread->th.th_team_nproc;
 2201   kmp_int32 nth = thread->th.th_team_nproc;
 2210   kmp_int32 num = tg->reduce_num_data;
 2211   kmp_int32 tid = thread->th.th_info.ds.ds_tid;
 2257   kmp_int32 nth = th->th.th_team_nproc;
 2260   kmp_int32 num = tg->reduce_num_data;
 2306   kmp_int32 nth = thr->th.th_team_nproc;
 2572 static kmp_task_t *__kmp_remove_my_task(kmp_info_t *thread, kmp_int32 gtid,
 2574                                         kmp_int32 is_constrained) {
 2644 static kmp_task_t *__kmp_steal_task(kmp_info_t *victim_thr, kmp_int32 gtid,
 2646                                     std::atomic<kmp_int32> *unfinished_threads,
 2648                                     kmp_int32 is_constrained) {
 2653   kmp_int32 target;
 2654   kmp_int32 victim_tid;
 2747     kmp_int32 count;
 2784     kmp_info_t *thread, kmp_int32 gtid, C *flag, int final_spin,
 2786     kmp_int32 is_constrained) {
 2792   std::atomic<kmp_int32> *unfinished_threads;
 2793   kmp_int32 nthreads, victim_tid = -2, use_own_tasks = 1, new_victim = 0,
 2946         kmp_int32 count;
 2990     kmp_info_t *thread, kmp_int32 gtid, kmp_flag_32 *flag, int final_spin,
 2992     kmp_int32 is_constrained) {
 2999     kmp_info_t *thread, kmp_int32 gtid, kmp_flag_64 *flag, int final_spin,
 3001     kmp_int32 is_constrained) {
 3008     kmp_info_t *thread, kmp_int32 gtid, kmp_flag_oncore *flag, int final_spin,
 3010     kmp_int32 is_constrained) {
 3179   kmp_int32 nthreads, maxthreads;
 3618 static bool __kmp_give_task(kmp_info_t *thread, kmp_int32 tid, kmp_task_t *task,
 3619                             kmp_int32 pass) {
 3727   kmp_int32 children = 0;
 3738 static void __kmp_bottom_half_finish_proxy(kmp_int32 gtid, kmp_task_t *ptask) {
 3763 void __kmpc_proxy_task_completed(kmp_int32 gtid, kmp_task_t *ptask) {
 3804   kmp_int32 nthreads = team->t.t_nproc;
 3809   kmp_int32 start_k = 0;
 3810   kmp_int32 pass = 1;
 3811   kmp_int32 k = start_k;
 3956 typedef void (*p_task_dup_t)(kmp_task_t *, kmp_task_t *, kmp_int32);
 3991         kmp_int32 *lb = RCAST(kmp_int32 *, task->shareds);
 4012         kmp_int32 *ub = RCAST(kmp_int32 *, task->shareds) + 1;
 4099   kmp_int32 lastpriv = 0;
projects/openmp/runtime/src/kmp_threadprivate.cpp
  540 void *__kmpc_threadprivate(ident_t *loc, kmp_int32 global_tid, void *data,
  616                             kmp_int32 global_tid, // gtid.
projects/openmp/runtime/src/kmp_wait_release.h
  468         std::atomic<kmp_int32> *unfinished_threads =
  748   int execute_tasks(kmp_info_t *this_thr, kmp_int32 gtid, int final_spin,
  750                     kmp_int32 is_constrained) {
  777   int execute_tasks(kmp_info_t *this_thr, kmp_int32 gtid, int final_spin,
  779                     kmp_int32 is_constrained) {
  900   int execute_tasks(kmp_info_t *this_thr, kmp_int32 gtid, int final_spin,
  902                     kmp_int32 is_constrained) {
projects/openmp/runtime/src/ompt-specific.cpp
  497   kmp_int32 flags = thr->th.th_ident->flags;
projects/openmp/runtime/src/z_Linux_util.cpp
  472 static kmp_int32 __kmp_set_stack_info(int gtid, kmp_info_t *th) {