diff options
| -rw-r--r-- | ipc/sem.c | 10 |
1 file changed, 3 insertions(+), 7 deletions(-)
| @@ -895,7 +895,7 @@ static inline void lock_semundo(void) | |||
| 895 | struct sem_undo_list *undo_list; | 895 | struct sem_undo_list *undo_list; |
| 896 | 896 | ||
| 897 | undo_list = current->sysvsem.undo_list; | 897 | undo_list = current->sysvsem.undo_list; |
| 898 | if ((undo_list != NULL) && (atomic_read(&undo_list->refcnt) != 1)) | 898 | if (undo_list) |
| 899 | spin_lock(&undo_list->lock); | 899 | spin_lock(&undo_list->lock); |
| 900 | } | 900 | } |
| 901 | 901 | ||
| @@ -915,7 +915,7 @@ static inline void unlock_semundo(void) | |||
| 915 | struct sem_undo_list *undo_list; | 915 | struct sem_undo_list *undo_list; |
| 916 | 916 | ||
| 917 | undo_list = current->sysvsem.undo_list; | 917 | undo_list = current->sysvsem.undo_list; |
| 918 | if ((undo_list != NULL) && (atomic_read(&undo_list->refcnt) != 1)) | 918 | if (undo_list) |
| 919 | spin_unlock(&undo_list->lock); | 919 | spin_unlock(&undo_list->lock); |
| 920 | } | 920 | } |
| 921 | 921 | ||
| @@ -943,9 +943,7 @@ static inline int get_undo_list(struct sem_undo_list **undo_listp) | |||
| 943 | if (undo_list == NULL) | 943 | if (undo_list == NULL) |
| 944 | return -ENOMEM; | 944 | return -ENOMEM; |
| 945 | memset(undo_list, 0, size); | 945 | memset(undo_list, 0, size); |
| 946 | /* don't initialize unodhd->lock here. It's done | 946 | spin_lock_init(&undo_list->lock); |
| 947 | * in copy_semundo() instead. | ||
| 948 | */ | ||
| 949 | atomic_set(&undo_list->refcnt, 1); | 947 | atomic_set(&undo_list->refcnt, 1); |
| 950 | current->sysvsem.undo_list = undo_list; | 948 | current->sysvsem.undo_list = undo_list; |
| 951 | } | 949 | } |
| @@ -1231,8 +1229,6 @@ int copy_semundo(unsigned long clone_flags, struct task_struct *tsk) | |||
| 1231 | error = get_undo_list(&undo_list); | 1229 | error = get_undo_list(&undo_list); |
| 1232 | if (error) | 1230 | if (error) |
| 1233 | return error; | 1231 | return error; |
| 1234 | if (atomic_read(&undo_list->refcnt) == 1) | ||
| 1235 | spin_lock_init(&undo_list->lock); | ||
| 1236 | atomic_inc(&undo_list->refcnt); | 1232 | atomic_inc(&undo_list->refcnt); |
| 1237 | tsk->sysvsem.undo_list = undo_list; | 1233 | tsk->sysvsem.undo_list = undo_list; |
| 1238 | } else | 1234 | } else |
