1 From e4ddd92318e50d8fad1f91fc07c5f6bacb9d6b21 Mon Sep 17 00:00:00 2001
2 From: Daniel Golle <daniel@makrotopia.org>
3 Date: Mon, 25 May 2015 16:02:49 -0400
4 Subject: [PATCH] squashed commits since v1.1.9
6 967bcbf mark mips crt code as code
7 7b75c48 mark mips cancellable syscall code as code
8 0e0e494 simplify/shrink relocation processing in dynamic linker stage 1
9 09db855 remove processing of DT_JMPREL from dynamic linker stage 1 bootstrap
10 9f26ebd fix stack alignment code in mips crt_arch.h
11 63caf1d add .text section directive to all crt_arch.h files missing it
12 3b0e832 remove outdated and misleading comment in iconv.c
13 39b8ce6 in iconv_open, accept "CHAR" and "" as aliases for "UTF-8"
14 c648cef fix inconsistency in a_and and a_or argument types on x86[_64]
15 390f93e inline llsc atomics when building for sh4a
16 c093e2e reprocess libc/ldso RELA relocations in stage 3 of dynamic linking
17 43e9f65 fix null pointer dereference in dcngettext under specific conditions
18 68630b5 eliminate costly tricks to avoid TLS access for current locale state
19 707d7c3 in i386 __set_thread_area, don't assume %gs register is initially zero
20 c0f10cf make arm reloc.h CRTJMP macro compatible with thumb
21 83340c7 make arm crt_arch.h compatible with thumb code generation
23 arch/aarch64/crt_arch.h | 1 +
24 arch/arm/crt_arch.h | 11 +--
25 arch/arm/reloc.h | 5 ++
26 arch/i386/atomic.h | 8 +--
27 arch/microblaze/crt_arch.h | 1 +
28 arch/mips/crt_arch.h | 5 +-
29 arch/or1k/crt_arch.h | 1 +
30 arch/powerpc/crt_arch.h | 1 +
31 arch/sh/atomic.h | 83 ++++++++++++++++++++++
32 arch/sh/crt_arch.h | 1 +
33 arch/sh/src/atomic.c | 135 ++++++++++++------------------------
34 arch/x32/atomic.h | 8 +--
35 arch/x86_64/atomic.h | 8 +--
38 src/internal/libc.h | 2 -
39 src/internal/locale_impl.h | 6 +-
40 src/ldso/dlstart.c | 41 +++++------
41 src/ldso/dynlink.c | 2 +-
42 src/locale/dcngettext.c | 2 +-
43 src/locale/iconv.c | 9 +--
44 src/locale/setlocale.c | 7 +-
45 src/locale/uselocale.c | 10 +--
46 src/thread/i386/__set_thread_area.s | 13 ++--
47 src/thread/mips/syscall_cp.s | 3 +
48 src/thread/pthread_create.c | 6 --
49 26 files changed, 200 insertions(+), 173 deletions(-)
51 diff --git a/arch/aarch64/crt_arch.h b/arch/aarch64/crt_arch.h
52 index 3a4b321..b64fb3d 100644
53 --- a/arch/aarch64/crt_arch.h
54 +++ b/arch/aarch64/crt_arch.h
59 ".type " START ",%function\n"
61 diff --git a/arch/arm/crt_arch.h b/arch/arm/crt_arch.h
62 index d1f9a66..99508b1 100644
63 --- a/arch/arm/crt_arch.h
64 +++ b/arch/arm/crt_arch.h
68 ".global " START " \n"
69 ".type " START ",%function \n"
75 -"2: add a2, pc, a2 \n"
76 -" and sp, sp, #-16 \n"
79 +"2: and ip, a1, #-16 \n"
84 -"1: .word _DYNAMIC-2b-8 \n"
86 +"1: .word _DYNAMIC-2b \n"
88 diff --git a/arch/arm/reloc.h b/arch/arm/reloc.h
89 index dec0031..e1ef350 100644
90 --- a/arch/arm/reloc.h
91 +++ b/arch/arm/reloc.h
93 #define REL_TPOFF R_ARM_TLS_TPOFF32
94 //#define REL_TLSDESC R_ARM_TLS_DESC
97 +#define CRTJMP(pc,sp) __asm__ __volatile__( \
98 + "mov sp,%1 ; bx %0" : : "r"(pc), "r"(sp) : "memory" )
100 #define CRTJMP(pc,sp) __asm__ __volatile__( \
101 "mov sp,%1 ; tst %0,#1 ; moveq pc,%0 ; bx %0" : : "r"(pc), "r"(sp) : "memory" )
103 diff --git a/arch/i386/atomic.h b/arch/i386/atomic.h
104 index 4fe7bde..95fecbd 100644
105 --- a/arch/i386/atomic.h
106 +++ b/arch/i386/atomic.h
107 @@ -50,16 +50,16 @@ static inline int a_cas(volatile int *p, int t, int s)
111 -static inline void a_or(volatile void *p, int v)
112 +static inline void a_or(volatile int *p, int v)
114 __asm__( "lock ; orl %1, %0"
115 - : "=m"(*(int *)p) : "r"(v) : "memory" );
116 + : "=m"(*p) : "r"(v) : "memory" );
119 -static inline void a_and(volatile void *p, int v)
120 +static inline void a_and(volatile int *p, int v)
122 __asm__( "lock ; andl %1, %0"
123 - : "=m"(*(int *)p) : "r"(v) : "memory" );
124 + : "=m"(*p) : "r"(v) : "memory" );
127 static inline int a_swap(volatile int *x, int v)
128 diff --git a/arch/microblaze/crt_arch.h b/arch/microblaze/crt_arch.h
129 index ada98c8..bca78bf 100644
130 --- a/arch/microblaze/crt_arch.h
131 +++ b/arch/microblaze/crt_arch.h
135 ".global " START " \n"
138 diff --git a/arch/mips/crt_arch.h b/arch/mips/crt_arch.h
139 index 9a60be0..21e139b 100644
140 --- a/arch/mips/crt_arch.h
141 +++ b/arch/mips/crt_arch.h
147 ".global _" START "\n"
148 ".global " START "\n"
149 ".type _" START ", @function\n"
150 @@ -21,8 +22,8 @@ __asm__(
151 " addu $5, $5, $gp \n"
153 " addu $25, $25, $gp \n"
154 -" subu $sp, $sp, 16 \n"
155 +" and $sp, $sp, -8 \n"
157 -" and $sp, $sp, -8 \n"
158 +" subu $sp, $sp, 16 \n"
161 diff --git a/arch/or1k/crt_arch.h b/arch/or1k/crt_arch.h
162 index 8441556..9e310ca 100644
163 --- a/arch/or1k/crt_arch.h
164 +++ b/arch/or1k/crt_arch.h
168 ".global " START " \n"
171 diff --git a/arch/powerpc/crt_arch.h b/arch/powerpc/crt_arch.h
172 index ec3cd29..9b65886 100644
173 --- a/arch/powerpc/crt_arch.h
174 +++ b/arch/powerpc/crt_arch.h
178 ".global " START " \n"
179 ".type " START ", %function \n"
181 diff --git a/arch/sh/atomic.h b/arch/sh/atomic.h
182 index a1d22e4..f2e6dac 100644
183 --- a/arch/sh/atomic.h
184 +++ b/arch/sh/atomic.h
185 @@ -22,6 +22,88 @@ static inline int a_ctz_64(uint64_t x)
189 +#define LLSC_CLOBBERS "r0", "t", "memory"
190 +#define LLSC_START(mem) "synco\n" \
191 + "0: movli.l @" mem ", r0\n"
192 +#define LLSC_END(mem) \
193 + "1: movco.l r0, @" mem "\n" \
197 +static inline int __sh_cas_llsc(volatile int *p, int t, int s)
200 + __asm__ __volatile__(
207 + : "=&r"(old) : "r"(p), "r"(t), "r"(s) : LLSC_CLOBBERS);
211 +static inline int __sh_swap_llsc(volatile int *x, int v)
214 + __asm__ __volatile__(
219 + : "=&r"(old) : "r"(x), "r"(v) : LLSC_CLOBBERS);
223 +static inline int __sh_fetch_add_llsc(volatile int *x, int v)
226 + __asm__ __volatile__(
231 + : "=&r"(old) : "r"(x), "r"(v) : LLSC_CLOBBERS);
235 +static inline void __sh_store_llsc(volatile int *p, int x)
237 + __asm__ __volatile__(
241 + : : "r"(p), "r"(x) : "memory");
244 +static inline void __sh_and_llsc(volatile int *x, int v)
246 + __asm__ __volatile__(
250 + : : "r"(x), "r"(v) : LLSC_CLOBBERS);
253 +static inline void __sh_or_llsc(volatile int *x, int v)
255 + __asm__ __volatile__(
259 + : : "r"(x), "r"(v) : LLSC_CLOBBERS);
263 +#define a_cas(p,t,s) __sh_cas_llsc(p,t,s)
264 +#define a_swap(x,v) __sh_swap_llsc(x,v)
265 +#define a_fetch_add(x,v) __sh_fetch_add_llsc(x, v)
266 +#define a_store(x,v) __sh_store_llsc(x, v)
267 +#define a_and(x,v) __sh_and_llsc(x, v)
268 +#define a_or(x,v) __sh_or_llsc(x, v)
271 int __sh_cas(volatile int *, int, int);
272 int __sh_swap(volatile int *, int);
273 int __sh_fetch_add(volatile int *, int);
274 @@ -35,6 +117,7 @@ void __sh_or(volatile int *, int);
275 #define a_store(x,v) __sh_store(x, v)
276 #define a_and(x,v) __sh_and(x, v)
277 #define a_or(x,v) __sh_or(x, v)
280 static inline void *a_cas_p(volatile void *p, void *t, void *s)
282 diff --git a/arch/sh/crt_arch.h b/arch/sh/crt_arch.h
283 index a873ffd..f890710 100644
284 --- a/arch/sh/crt_arch.h
285 +++ b/arch/sh/crt_arch.h
289 ".global " START " \n"
292 diff --git a/arch/sh/src/atomic.c b/arch/sh/src/atomic.c
293 index 1339567..f8c615f 100644
294 --- a/arch/sh/src/atomic.c
295 +++ b/arch/sh/src/atomic.c
300 -#define LLSC_CLOBBERS "r0", "t", "memory"
301 -#define LLSC_START(mem) "synco\n" \
302 - "0: movli.l @" mem ", r0\n"
303 -#define LLSC_END(mem) \
304 - "1: movco.l r0, @" mem "\n" \
310 /* gusa is a hack in the kernel which lets you create a sequence of instructions
311 * which will be restarted if the process is preempted in the middle of the
314 int __sh_cas(volatile int *p, int t, int s)
316 + if (__hwcap & CPU_HAS_LLSC) return __sh_cas_llsc(p, t, s);
319 - if (__hwcap & CPU_HAS_LLSC) {
320 - __asm__ __volatile__(
327 - : "=&r"(old) : "r"(p), "r"(t), "r"(s) : LLSC_CLOBBERS);
329 - __asm__ __volatile__(
330 - GUSA_START_EVEN("%1", "%0")
333 - GUSA_END("%1", "%3")
334 - : "=&r"(old) : "r"(p), "r"(t), "r"(s) : GUSA_CLOBBERS, "t");
336 + __asm__ __volatile__(
337 + GUSA_START_EVEN("%1", "%0")
340 + GUSA_END("%1", "%3")
341 + : "=&r"(old) : "r"(p), "r"(t), "r"(s) : GUSA_CLOBBERS, "t");
345 int __sh_swap(volatile int *x, int v)
347 + if (__hwcap & CPU_HAS_LLSC) return __sh_swap_llsc(x, v);
350 - if (__hwcap & CPU_HAS_LLSC) {
351 - __asm__ __volatile__(
356 - : "=&r"(old) : "r"(x), "r"(v) : LLSC_CLOBBERS);
358 - __asm__ __volatile__(
359 - GUSA_START_EVEN("%1", "%0")
360 - GUSA_END("%1", "%2")
361 - : "=&r"(old) : "r"(x), "r"(v) : GUSA_CLOBBERS);
363 + __asm__ __volatile__(
364 + GUSA_START_EVEN("%1", "%0")
365 + GUSA_END("%1", "%2")
366 + : "=&r"(old) : "r"(x), "r"(v) : GUSA_CLOBBERS);
370 int __sh_fetch_add(volatile int *x, int v)
372 + if (__hwcap & CPU_HAS_LLSC) return __sh_fetch_add_llsc(x, v);
375 - if (__hwcap & CPU_HAS_LLSC) {
376 - __asm__ __volatile__(
381 - : "=&r"(old) : "r"(x), "r"(v) : LLSC_CLOBBERS);
383 - __asm__ __volatile__(
384 - GUSA_START_EVEN("%2", "%0")
387 - GUSA_END("%2", "%1")
388 - : "=&r"(old), "=&r"(dummy) : "r"(x), "r"(v) : GUSA_CLOBBERS);
390 + __asm__ __volatile__(
391 + GUSA_START_EVEN("%2", "%0")
394 + GUSA_END("%2", "%1")
395 + : "=&r"(old), "=&r"(dummy) : "r"(x), "r"(v) : GUSA_CLOBBERS);
399 void __sh_store(volatile int *p, int x)
401 - if (__hwcap & CPU_HAS_LLSC) {
402 - __asm__ __volatile__(
406 - : : "r"(p), "r"(x) : "memory");
408 - __asm__ __volatile__(
410 - : : "r"(p), "r"(x) : "memory");
412 + if (__hwcap & CPU_HAS_LLSC) return __sh_store_llsc(p, x);
413 + __asm__ __volatile__(
415 + : : "r"(p), "r"(x) : "memory");
418 void __sh_and(volatile int *x, int v)
420 + if (__hwcap & CPU_HAS_LLSC) return __sh_and_llsc(x, v);
423 - if (__hwcap & CPU_HAS_LLSC) {
424 - __asm__ __volatile__(
428 - : : "r"(x), "r"(v) : LLSC_CLOBBERS);
430 - __asm__ __volatile__(
431 - GUSA_START_ODD("%1", "%0")
433 - GUSA_END("%1", "%0")
434 - : "=&r"(dummy) : "r"(x), "r"(v) : GUSA_CLOBBERS);
436 + __asm__ __volatile__(
437 + GUSA_START_ODD("%1", "%0")
439 + GUSA_END("%1", "%0")
440 + : "=&r"(dummy) : "r"(x), "r"(v) : GUSA_CLOBBERS);
443 void __sh_or(volatile int *x, int v)
445 + if (__hwcap & CPU_HAS_LLSC) return __sh_or_llsc(x, v);
448 - if (__hwcap & CPU_HAS_LLSC) {
449 - __asm__ __volatile__(
453 - : : "r"(x), "r"(v) : LLSC_CLOBBERS);
455 - __asm__ __volatile__(
456 - GUSA_START_ODD("%1", "%0")
458 - GUSA_END("%1", "%0")
459 - : "=&r"(dummy) : "r"(x), "r"(v) : GUSA_CLOBBERS);
461 + __asm__ __volatile__(
462 + GUSA_START_ODD("%1", "%0")
464 + GUSA_END("%1", "%0")
465 + : "=&r"(dummy) : "r"(x), "r"(v) : GUSA_CLOBBERS);
469 diff --git a/arch/x32/atomic.h b/arch/x32/atomic.h
470 index 333098c..b2014cc 100644
471 --- a/arch/x32/atomic.h
472 +++ b/arch/x32/atomic.h
473 @@ -47,16 +47,16 @@ static inline int a_cas(volatile int *p, int t, int s)
477 -static inline void a_or(volatile void *p, int v)
478 +static inline void a_or(volatile int *p, int v)
480 __asm__( "lock ; or %1, %0"
481 - : "=m"(*(int *)p) : "r"(v) : "memory" );
482 + : "=m"(*p) : "r"(v) : "memory" );
485 -static inline void a_and(volatile void *p, int v)
486 +static inline void a_and(volatile int *p, int v)
488 __asm__( "lock ; and %1, %0"
489 - : "=m"(*(int *)p) : "r"(v) : "memory" );
490 + : "=m"(*p) : "r"(v) : "memory" );
493 static inline int a_swap(volatile int *x, int v)
494 diff --git a/arch/x86_64/atomic.h b/arch/x86_64/atomic.h
495 index 333098c..b2014cc 100644
496 --- a/arch/x86_64/atomic.h
497 +++ b/arch/x86_64/atomic.h
498 @@ -47,16 +47,16 @@ static inline int a_cas(volatile int *p, int t, int s)
502 -static inline void a_or(volatile void *p, int v)
503 +static inline void a_or(volatile int *p, int v)
505 __asm__( "lock ; or %1, %0"
506 - : "=m"(*(int *)p) : "r"(v) : "memory" );
507 + : "=m"(*p) : "r"(v) : "memory" );
510 -static inline void a_and(volatile void *p, int v)
511 +static inline void a_and(volatile int *p, int v)
513 __asm__( "lock ; and %1, %0"
514 - : "=m"(*(int *)p) : "r"(v) : "memory" );
515 + : "=m"(*p) : "r"(v) : "memory" );
518 static inline int a_swap(volatile int *x, int v)
519 diff --git a/crt/mips/crt1.s b/crt/mips/crt1.s
520 index 093d7d5..794b6f7 100644
521 --- a/crt/mips/crt1.s
522 +++ b/crt/mips/crt1.s
527 +.type __start,@function
528 +.type _start,@function
531 subu $fp, $fp, $fp # Zero the frame pointer.
532 diff --git a/crt/mips/crti.s b/crt/mips/crti.s
533 index b1593d1..39dee38 100644
534 --- a/crt/mips/crti.s
535 +++ b/crt/mips/crti.s
540 +.type _init,@function
544 @@ -10,6 +11,7 @@ _init:
548 +.type _fini,@function
552 diff --git a/src/internal/libc.h b/src/internal/libc.h
553 index 51ee186..212f0e8 100644
554 --- a/src/internal/libc.h
555 +++ b/src/internal/libc.h
556 @@ -23,8 +23,6 @@ struct __libc {
557 volatile int ofl_lock[2];
560 - volatile int uselocale_cnt;
561 - volatile int bytelocale_cnt_minus_1;
562 struct __locale_struct global_locale;
565 diff --git a/src/internal/locale_impl.h b/src/internal/locale_impl.h
566 index 9142f0c..5aebbf6 100644
567 --- a/src/internal/locale_impl.h
568 +++ b/src/internal/locale_impl.h
569 @@ -20,11 +20,9 @@ const char *__lctrans_cur(const char *);
570 #define LCTRANS(msg, lc, loc) __lctrans(msg, (loc)->cat[(lc)-2])
571 #define LCTRANS_CUR(msg) __lctrans_cur(msg)
573 -#define CURRENT_LOCALE \
574 - (libc.uselocale_cnt ? __pthread_self()->locale : &libc.global_locale)
575 +#define CURRENT_LOCALE (__pthread_self()->locale)
577 -#define CURRENT_UTF8 \
578 - (libc.bytelocale_cnt_minus_1<0 || __pthread_self()->locale->ctype_utf8)
579 +#define CURRENT_UTF8 (__pthread_self()->locale->ctype_utf8)
582 #define MB_CUR_MAX (CURRENT_UTF8 ? 4 : 1)
583 diff --git a/src/ldso/dlstart.c b/src/ldso/dlstart.c
584 index 46f4a5c..5f84465 100644
585 --- a/src/ldso/dlstart.c
586 +++ b/src/ldso/dlstart.c
587 @@ -56,31 +56,22 @@ void _dlstart_c(size_t *sp, size_t *dynv)
588 for (i=0; i<local_cnt; i++) got[i] += (size_t)base;
591 - /* The use of the reloc_info structure and nested loops is a trick
592 - * to work around the fact that we can't necessarily make function
593 - * calls yet. Each struct in the array serves like the arguments
594 - * to a function call. */
600 - { base+dyn[DT_JMPREL], dyn[DT_PLTRELSZ], 2+(dyn[DT_PLTREL]==DT_RELA) },
601 - { base+dyn[DT_REL], dyn[DT_RELSZ], 2 },
602 - { base+dyn[DT_RELA], dyn[DT_RELASZ], 3 },
606 - for (i=0; reloc_info[i].stride; i++) {
607 - size_t *rel = reloc_info[i].rel;
608 - size_t rel_size = reloc_info[i].size;
609 - size_t stride = reloc_info[i].stride;
610 - for (; rel_size; rel+=stride, rel_size-=stride*sizeof(size_t)) {
611 - if (!IS_RELATIVE(rel[1])) continue;
612 - size_t *rel_addr = (void *)(base + rel[0]);
613 - size_t addend = stride==3 ? rel[2] : *rel_addr;
614 - *rel_addr = (size_t)base + addend;
616 + size_t *rel, rel_size;
618 + rel = (void *)(base+dyn[DT_REL]);
619 + rel_size = dyn[DT_RELSZ];
620 + for (; rel_size; rel+=2, rel_size-=2*sizeof(size_t)) {
621 + if (!IS_RELATIVE(rel[1])) continue;
622 + size_t *rel_addr = (void *)(base + rel[0]);
623 + *rel_addr += (size_t)base;
626 + rel = (void *)(base+dyn[DT_RELA]);
627 + rel_size = dyn[DT_RELASZ];
628 + for (; rel_size; rel+=3, rel_size-=3*sizeof(size_t)) {
629 + if (!IS_RELATIVE(rel[1])) continue;
630 + size_t *rel_addr = (void *)(base + rel[0]);
631 + *rel_addr = (size_t)base + rel[2];
634 const char *strings = (void *)(base + dyn[DT_STRTAB]);
635 diff --git a/src/ldso/dynlink.c b/src/ldso/dynlink.c
636 index 7c92ef6..93595a0 100644
637 --- a/src/ldso/dynlink.c
638 +++ b/src/ldso/dynlink.c
639 @@ -281,7 +281,7 @@ static void do_relocs(struct dso *dso, size_t *rel, size_t rel_size, size_t stri
642 int gotplt = (type == REL_GOT || type == REL_PLT);
643 - if (dso->rel_update_got && !gotplt) continue;
644 + if (dso->rel_update_got && !gotplt && stride==2) continue;
646 addend = stride>2 ? rel[2]
647 : gotplt || type==REL_COPY ? 0
648 diff --git a/src/locale/dcngettext.c b/src/locale/dcngettext.c
649 index 0057cb5..30dd41d 100644
650 --- a/src/locale/dcngettext.c
651 +++ b/src/locale/dcngettext.c
652 @@ -132,7 +132,7 @@ char *dcngettext(const char *domainname, const char *msgid1, const char *msgid2,
655 locname = loc->messages_name;
656 - if (!*locname) goto notrans;
657 + if (!locname || !*locname) goto notrans;
661 diff --git a/src/locale/iconv.c b/src/locale/iconv.c
662 index a0b0232..e6121ae 100644
663 --- a/src/locale/iconv.c
664 +++ b/src/locale/iconv.c
669 -/* FIXME: these are not implemented yet
671 - * GBK: 81-FE 40-7E,80-FE
672 - * Big5: A1-FE 40-7E,A1-FE
675 /* Definitions of charmaps. Each charmap consists of:
676 * 1. Empty-string-terminated list of null-terminated aliases.
677 * 2. Special type code or number of elided entries.
678 * 3. Character table (size determined by field 2). */
680 static const unsigned char charmaps[] =
682 +"utf8\0char\0\0\310"
684 "ucs2\0ucs2be\0\0\304"
686 @@ -90,6 +84,7 @@ static int fuzzycmp(const unsigned char *a, const unsigned char *b)
687 static size_t find_charmap(const void *name)
689 const unsigned char *s;
690 + if (!*(char *)name) name=charmaps; /* "utf8" */
691 for (s=charmaps; *s; ) {
692 if (!fuzzycmp(name, s)) {
693 for (; *s; s+=strlen((void *)s)+1);
694 diff --git a/src/locale/setlocale.c b/src/locale/setlocale.c
695 index 8ea389a..d797f43 100644
696 --- a/src/locale/setlocale.c
697 +++ b/src/locale/setlocale.c
698 @@ -55,12 +55,7 @@ char *setlocale(int cat, const char *name)
703 - int adj = libc.global_locale.ctype_utf8;
704 - __setlocalecat(&libc.global_locale, cat, name);
705 - adj -= libc.global_locale.ctype_utf8;
706 - if (adj) a_fetch_add(&libc.bytelocale_cnt_minus_1, adj);
708 + if (name) __setlocalecat(&libc.global_locale, cat, name);
712 diff --git a/src/locale/uselocale.c b/src/locale/uselocale.c
713 index 5106795..b70a0c1 100644
714 --- a/src/locale/uselocale.c
715 +++ b/src/locale/uselocale.c
716 @@ -10,15 +10,7 @@ locale_t __uselocale(locale_t new)
718 if (new == LC_GLOBAL_LOCALE) new = global;
720 - if (new && new != old) {
722 - if (new == global) a_dec(&libc.uselocale_cnt);
723 - else if (!new->ctype_utf8) adj++;
724 - if (old == global) a_inc(&libc.uselocale_cnt);
725 - else if (!old->ctype_utf8) adj--;
726 - a_fetch_add(&libc.bytelocale_cnt_minus_1, adj);
727 - self->locale = new;
729 + self->locale = new;
731 return old == global ? LC_GLOBAL_LOCALE : old;
733 diff --git a/src/thread/i386/__set_thread_area.s b/src/thread/i386/__set_thread_area.s
734 index 1d85268..3a558fb 100644
735 --- a/src/thread/i386/__set_thread_area.s
736 +++ b/src/thread/i386/__set_thread_area.s
737 @@ -6,10 +6,10 @@ __set_thread_area:
746 +1: addl $4f-1b,(%esp)
752 @@ -18,6 +18,7 @@ __set_thread_area:
760 @@ -38,3 +39,7 @@ __set_thread_area:
768 diff --git a/src/thread/mips/syscall_cp.s b/src/thread/mips/syscall_cp.s
769 index 399289e..8f76d40 100644
770 --- a/src/thread/mips/syscall_cp.s
771 +++ b/src/thread/mips/syscall_cp.s
776 +.type __cp_begin,@function
779 +.type __cp_end,@function
782 +.type __cp_cancel,@function
784 .global __syscall_cp_asm
785 .hidden __syscall_cp_asm
786 diff --git a/src/thread/pthread_create.c b/src/thread/pthread_create.c
787 index 4eb8b88..de72818 100644
788 --- a/src/thread/pthread_create.c
789 +++ b/src/thread/pthread_create.c
790 @@ -67,12 +67,6 @@ _Noreturn void __pthread_exit(void *result)
794 - if (self->locale != &libc.global_locale) {
795 - a_dec(&libc.uselocale_cnt);
796 - if (self->locale->ctype_utf8)
797 - a_dec(&libc.bytelocale_cnt_minus_1);
800 /* Process robust list in userspace to handle non-pshared mutexes
801 * and the detached thread case where the robust list head will
802 * be invalid when the kernel would process it. */