libc: generate syscall stubs in one big file...

...all the better to switch to a genrule rather than checking in
generated source.

This also removes the script's git-handling code rather than fixing it;
we won't need that where we're going.
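
For context, a rough sketch of the kind of single-file emission this implies
(illustrative only, not the real gensyscalls.py; the stub template and helper
names below are placeholders):

  # Sketch: emit every x86 stub into one generated .S instead of one
  # checked-in file per syscall, so a build-time genrule can own the output.
  STUB_TEMPLATE = """\
  ENTRY(%(func)s)
      movl    $__NR_%(nr)s, %%eax
      /* ...load args, enter the kernel, set errno on failure... */
  END(%(func)s)

  """

  def write_stub_file(syscalls, path):
      # syscalls: list of (libc function name, __NR_ suffix) pairs.
      with open(path, "w") as f:
          f.write("/* Generated by gensyscalls.py. Do not edit. */\n")
          f.write("#include <private/bionic_asm.h>\n\n")
          for func, nr in syscalls:
              f.write(STUB_TEMPLATE % {"func": func, "nr": nr})

  write_stub_file([("getuid", "getuid32"), ("getpid", "getpid")],
                  "libc/arch-x86/syscalls.S")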

Test: boots
Change-Id: I468ce019d4232a7ef27e5cb5cfd89f4c2fe4ecbd
diff --git a/libc/arch-x86/syscalls.S b/libc/arch-x86/syscalls.S
new file mode 100644
index 0000000..3cb68ac
--- /dev/null
+++ b/libc/arch-x86/syscalls.S
@@ -0,0 +1,7113 @@
+/* Generated by gensyscalls.py. Do not edit. */
+#include <private/bionic_asm.h>
+
+ENTRY(execve)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_execve, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(execve)
+
+ENTRY(getuid)
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    movl    $__NR_getuid32, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    ret
+END(getuid)
+
+ENTRY(getgid)
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    movl    $__NR_getgid32, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    ret
+END(getgid)
+
+ENTRY(geteuid)
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    movl    $__NR_geteuid32, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    ret
+END(geteuid)
+
+ENTRY(getegid)
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    movl    $__NR_getegid32, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    ret
+END(getegid)
+
+ENTRY(getresuid)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_getresuid32, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(getresuid)
+
+ENTRY(getresgid)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_getresgid32, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(getresgid)
+
+ENTRY(readahead)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_readahead, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(readahead)
+
+ENTRY(getgroups)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_getgroups32, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(getgroups)
+
+ENTRY(getpgid)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_getpgid, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(getpgid)
+
+ENTRY(getppid)
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    movl    $__NR_getppid, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    ret
+END(getppid)
+
+ENTRY(getsid)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_getsid, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(getsid)
+
+ENTRY(setsid)
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    movl    $__NR_setsid, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    ret
+END(setsid)
+
+ENTRY(setgid)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_setgid32, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(setgid)
+
+ENTRY(setuid)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_setuid32, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(setuid)
+
+ENTRY(setreuid)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_setreuid32, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(setreuid)
+
+ENTRY(setresuid)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_setresuid32, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(setresuid)
+
+ENTRY(setresgid)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_setresgid32, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(setresgid)
+
+ENTRY(__brk)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_brk, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(__brk)
+
+ENTRY(kill)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_kill, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(kill)
+
+ENTRY(tgkill)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_tgkill, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(tgkill)
+
+ENTRY(__ptrace)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_ptrace, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__ptrace)
+
+ENTRY(getrusage)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_getrusage, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(getrusage)
+
+ENTRY(__getpriority)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_getpriority, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__getpriority)
+
+ENTRY(setpriority)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_setpriority, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(setpriority)
+
+ENTRY(getrlimit)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_ugetrlimit, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(getrlimit)
+
+ENTRY(setrlimit)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_setrlimit, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(setrlimit)
+
+ENTRY(prlimit64)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_prlimit64, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(prlimit64)
+
+ENTRY(setgroups)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_setgroups32, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(setgroups)
+
+ENTRY(setpgid)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_setpgid, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(setpgid)
+
+ENTRY(setregid)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_setregid32, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(setregid)
+
+ENTRY(chroot)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_chroot, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(chroot)
+
+ENTRY(prctl)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     28(%esp), %ebx
+    mov     32(%esp), %ecx
+    mov     36(%esp), %edx
+    mov     40(%esp), %esi
+    mov     44(%esp), %edi
+    movl    $__NR_prctl, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(prctl)
+
+ENTRY(capget)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_capget, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(capget)
+
+ENTRY(capset)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_capset, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(capset)
+
+ENTRY(sigaltstack)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_sigaltstack, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(sigaltstack)
+
+ENTRY(acct)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_acct, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(acct)
+
+ENTRY(read)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_read, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(read)
+
+ENTRY(write)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_write, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(write)
+
+ENTRY(pread64)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     28(%esp), %ebx
+    mov     32(%esp), %ecx
+    mov     36(%esp), %edx
+    mov     40(%esp), %esi
+    mov     44(%esp), %edi
+    movl    $__NR_pread64, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(pread64)
+
+ENTRY(pwrite64)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     28(%esp), %ebx
+    mov     32(%esp), %ecx
+    mov     36(%esp), %edx
+    mov     40(%esp), %esi
+    mov     44(%esp), %edi
+    movl    $__NR_pwrite64, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(pwrite64)
+
+ENTRY(__preadv64)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     28(%esp), %ebx
+    mov     32(%esp), %ecx
+    mov     36(%esp), %edx
+    mov     40(%esp), %esi
+    mov     44(%esp), %edi
+    movl    $__NR_preadv, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__preadv64)
+
+ENTRY(__pwritev64)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     28(%esp), %ebx
+    mov     32(%esp), %ecx
+    mov     36(%esp), %edx
+    mov     40(%esp), %esi
+    mov     44(%esp), %edi
+    movl    $__NR_pwritev, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__pwritev64)
+
+ENTRY(___close)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_close, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(___close)
+.hidden ___close
+
+ENTRY(__getpid)
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    movl    $__NR_getpid, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    ret
+END(__getpid)
+
+ENTRY(munmap)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_munmap, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(munmap)
+
+ENTRY(___mremap)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     28(%esp), %ebx
+    mov     32(%esp), %ecx
+    mov     36(%esp), %edx
+    mov     40(%esp), %esi
+    mov     44(%esp), %edi
+    movl    $__NR_mremap, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(___mremap)
+.hidden ___mremap
+
+ENTRY(msync)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_msync, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(msync)
+
+ENTRY(mprotect)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_mprotect, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(mprotect)
+
+ENTRY(madvise)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_madvise, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(madvise)
+
+ENTRY(mlock)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_mlock, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(mlock)
+
+ENTRY(munlock)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_munlock, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(munlock)
+
+ENTRY(mlockall)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_mlockall, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(mlockall)
+
+ENTRY(munlockall)
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    movl    $__NR_munlockall, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    ret
+END(munlockall)
+
+ENTRY(mincore)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_mincore, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(mincore)
+
+ENTRY(__ioctl)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_ioctl, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__ioctl)
+
+ENTRY(readv)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_readv, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(readv)
+
+ENTRY(writev)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_writev, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(writev)
+
+ENTRY(__fcntl64)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_fcntl64, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__fcntl64)
+
+ENTRY(flock)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_flock, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(flock)
+
+ENTRY(___fchmod)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_fchmod, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(___fchmod)
+.hidden ___fchmod
+
+ENTRY(dup)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_dup, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(dup)
+
+ENTRY(pipe2)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_pipe2, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(pipe2)
+
+ENTRY(dup3)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_dup3, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(dup3)
+
+ENTRY(fsync)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_fsync, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(fsync)
+
+ENTRY(fdatasync)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_fdatasync, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(fdatasync)
+
+ENTRY(fchown)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_fchown32, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(fchown)
+
+ENTRY(sync)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_sync, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(sync)
+
+ENTRY(syncfs)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_syncfs, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(syncfs)
+
+ENTRY(___fsetxattr)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     28(%esp), %ebx
+    mov     32(%esp), %ecx
+    mov     36(%esp), %edx
+    mov     40(%esp), %esi
+    mov     44(%esp), %edi
+    movl    $__NR_fsetxattr, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(___fsetxattr)
+.hidden ___fsetxattr
+
+ENTRY(___fgetxattr)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_fgetxattr, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(___fgetxattr)
+.hidden ___fgetxattr
+
+ENTRY(___flistxattr)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_flistxattr, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(___flistxattr)
+.hidden ___flistxattr
+
+ENTRY(fremovexattr)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_fremovexattr, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(fremovexattr)
+
+ENTRY(__getdents64)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_getdents64, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__getdents64)
+
+ENTRY(__openat)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_openat, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__openat)
+
+ENTRY(___faccessat)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_faccessat, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(___faccessat)
+.hidden ___faccessat
+
+ENTRY(___fchmodat)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_fchmodat, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(___fchmodat)
+.hidden ___fchmodat
+
+ENTRY(fchownat)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     28(%esp), %ebx
+    mov     32(%esp), %ecx
+    mov     36(%esp), %edx
+    mov     40(%esp), %esi
+    mov     44(%esp), %edi
+    movl    $__NR_fchownat, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(fchownat)
+
+ENTRY(fstatat64)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_fstatat64, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(fstatat64)
+
+ALIAS_SYMBOL(fstatat, fstatat64)
+
+ENTRY(linkat)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     28(%esp), %ebx
+    mov     32(%esp), %ecx
+    mov     36(%esp), %edx
+    mov     40(%esp), %esi
+    mov     44(%esp), %edi
+    movl    $__NR_linkat, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(linkat)
+
+ENTRY(mkdirat)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_mkdirat, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(mkdirat)
+
+ENTRY(mknodat)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_mknodat, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(mknodat)
+
+ENTRY(readlinkat)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_readlinkat, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(readlinkat)
+
+ENTRY(renameat)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_renameat, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(renameat)
+
+ENTRY(symlinkat)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_symlinkat, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(symlinkat)
+
+ENTRY(unlinkat)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_unlinkat, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(unlinkat)
+
+ENTRY(utimensat)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_utimensat, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(utimensat)
+
+ENTRY(lseek)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_lseek, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(lseek)
+
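+/*
+ * lseek above only handles a 32-bit off_t; __llseek maps to the five-word
+ * _llseek kernel interface: fd, the high and low halves of the 64-bit
+ * offset, a pointer through which the resulting position is written, and
+ * whence.
+ */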
+ENTRY(__llseek)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     28(%esp), %ebx
+    mov     32(%esp), %ecx
+    mov     36(%esp), %edx
+    mov     40(%esp), %esi
+    mov     44(%esp), %edi
+    movl    $__NR__llseek, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__llseek)
+
+ENTRY(ftruncate64)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_ftruncate64, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(ftruncate64)
+
+ENTRY(sendfile)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_sendfile, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(sendfile)
+
+ENTRY(sendfile64)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_sendfile64, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(sendfile64)
+
+ENTRY(truncate)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_truncate, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(truncate)
+
+ENTRY(truncate64)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_truncate64, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(truncate64)
+
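+/*
+ * __mmap2 takes six arguments, so the full register set
+ * %ebx/%ecx/%edx/%esi/%edi/%ebp is loaded; the argument offsets start at
+ * 32(%esp) because six saved registers plus the pushed entry-point address
+ * now sit below the return address.
+ */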
+ENTRY(__mmap2)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+    pushl   %ebp
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ebp, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     32(%esp), %ebx
+    mov     36(%esp), %ecx
+    mov     40(%esp), %edx
+    mov     44(%esp), %esi
+    mov     48(%esp), %edi
+    mov     52(%esp), %ebp
+    movl    $__NR_mmap2, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebp
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__mmap2)
+
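+/*
+ * fallocate64 and __fadvise64 also need six argument words on ILP32 because
+ * each 64-bit offset/length is split across a register pair; note they
+ * invoke __NR_fallocate and __NR_fadvise64_64 respectively.
+ */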
+ENTRY(fallocate64)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+    pushl   %ebp
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ebp, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     32(%esp), %ebx
+    mov     36(%esp), %ecx
+    mov     40(%esp), %edx
+    mov     44(%esp), %esi
+    mov     48(%esp), %edi
+    mov     52(%esp), %ebp
+    movl    $__NR_fallocate, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebp
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(fallocate64)
+
+ENTRY(__fadvise64)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+    pushl   %ebp
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ebp, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     32(%esp), %ebx
+    mov     36(%esp), %ecx
+    mov     40(%esp), %edx
+    mov     44(%esp), %esi
+    mov     48(%esp), %edi
+    mov     52(%esp), %ebp
+    movl    $__NR_fadvise64_64, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebp
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__fadvise64)
+
+ENTRY(__fstatfs64)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_fstatfs64, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__fstatfs64)
+
+ENTRY(__statfs64)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_statfs64, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__statfs64)
+
+ENTRY(fstat64)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_fstat64, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(fstat64)
+
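+/* fstat is exported as an alias of fstat64. */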
+ALIAS_SYMBOL(fstat, fstat64)
+
+ENTRY(chdir)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_chdir, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(chdir)
+
+ENTRY(mount)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     28(%esp), %ebx
+    mov     32(%esp), %ecx
+    mov     36(%esp), %edx
+    mov     40(%esp), %esi
+    mov     44(%esp), %edi
+    movl    $__NR_mount, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(mount)
+
+ENTRY(umount2)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_umount2, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(umount2)
+
+ENTRY(__getcwd)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_getcwd, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__getcwd)
+
+ENTRY(fchdir)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_fchdir, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(fchdir)
+
+ENTRY(setxattr)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     28(%esp), %ebx
+    mov     32(%esp), %ecx
+    mov     36(%esp), %edx
+    mov     40(%esp), %esi
+    mov     44(%esp), %edi
+    movl    $__NR_setxattr, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(setxattr)
+
+ENTRY(lsetxattr)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     28(%esp), %ebx
+    mov     32(%esp), %ecx
+    mov     36(%esp), %edx
+    mov     40(%esp), %esi
+    mov     44(%esp), %edi
+    movl    $__NR_lsetxattr, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(lsetxattr)
+
+ENTRY(getxattr)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_getxattr, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(getxattr)
+
+ENTRY(lgetxattr)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_lgetxattr, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(lgetxattr)
+
+ENTRY(listxattr)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_listxattr, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(listxattr)
+
+ENTRY(llistxattr)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_llistxattr, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(llistxattr)
+
+ENTRY(removexattr)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_removexattr, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(removexattr)
+
+ENTRY(lremovexattr)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_lremovexattr, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(lremovexattr)
+
+ENTRY(swapon)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_swapon, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(swapon)
+
+ENTRY(swapoff)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_swapoff, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(swapoff)
+
+ENTRY(settimeofday)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_settimeofday, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(settimeofday)
+
+ENTRY(times)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_times, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(times)
+
+ENTRY(nanosleep)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_nanosleep, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(nanosleep)
+
+ENTRY(clock_settime)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_clock_settime, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(clock_settime)
+
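+/*
+ * ___clock_nanosleep (and ___rt_sigqueueinfo further down) are internal
+ * stubs: the .hidden directive after END keeps them out of libc's dynamic
+ * symbol table.
+ */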
+ENTRY(___clock_nanosleep)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_clock_nanosleep, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(___clock_nanosleep)
+.hidden ___clock_nanosleep
+
+ENTRY(getitimer)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_getitimer, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(getitimer)
+
+ENTRY(setitimer)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_setitimer, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(setitimer)
+
+ENTRY(__timer_create)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_timer_create, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__timer_create)
+
+ENTRY(__timer_settime)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_timer_settime, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__timer_settime)
+
+ENTRY(__timer_gettime)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_timer_gettime, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__timer_gettime)
+
+ENTRY(__timer_getoverrun)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_timer_getoverrun, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(__timer_getoverrun)
+
+ENTRY(__timer_delete)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_timer_delete, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(__timer_delete)
+
+ENTRY(timerfd_create)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_timerfd_create, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(timerfd_create)
+
+ENTRY(timerfd_settime)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_timerfd_settime, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(timerfd_settime)
+
+ENTRY(timerfd_gettime)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_timerfd_gettime, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(timerfd_gettime)
+
+ENTRY(adjtimex)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_adjtimex, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(adjtimex)
+
+ENTRY(clock_adjtime)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_clock_adjtime, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(clock_adjtime)
+
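+/*
+ * __sigaction uses the legacy __NR_sigaction; __rt_sigaction below uses
+ * __NR_rt_sigaction, whose fourth argument is the sigset_t size the kernel
+ * expects.
+ */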
+ENTRY(__sigaction)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_sigaction, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__sigaction)
+
+ENTRY(__rt_sigaction)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_rt_sigaction, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__rt_sigaction)
+
+ENTRY(__rt_sigpending)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_rt_sigpending, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__rt_sigpending)
+
+ENTRY(__rt_sigprocmask)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_rt_sigprocmask, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__rt_sigprocmask)
+
+ENTRY(__rt_sigsuspend)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_rt_sigsuspend, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__rt_sigsuspend)
+
+ENTRY(__rt_sigtimedwait)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_rt_sigtimedwait, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__rt_sigtimedwait)
+
+ENTRY(___rt_sigqueueinfo)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_rt_sigqueueinfo, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(___rt_sigqueueinfo)
+.hidden ___rt_sigqueueinfo
+
+ENTRY(__signalfd4)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_signalfd4, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__signalfd4)
+
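+/*
+ * The socket stubs from here through sendmmsg are multiplexed through
+ * __NR_socketcall: %ebx carries the SYS_* sub-call number from <linux/net.h>
+ * (SYS_SOCKET is 1, SYS_BIND 2, ..., SYS_SENDMMSG 20) and %ecx points at the
+ * caller's argument block, which still sits on the stack: after the two
+ * saved registers, the pushed entry-point address, and the return address,
+ * it starts at 16(%esp).
+ */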
+ENTRY(__socket)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     $1, %ebx
+    mov     %esp, %ecx
+    addl    $16, %ecx
+    movl    $__NR_socketcall, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__socket)
+
+ENTRY(bind)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     $2, %ebx
+    mov     %esp, %ecx
+    addl    $16, %ecx
+    movl    $__NR_socketcall, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(bind)
+
+ENTRY(__connect)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     $3, %ebx
+    mov     %esp, %ecx
+    addl    $16, %ecx
+    movl    $__NR_socketcall, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__connect)
+
+ENTRY(listen)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     $4, %ebx
+    mov     %esp, %ecx
+    addl    $16, %ecx
+    movl    $__NR_socketcall, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(listen)
+
+ENTRY(getsockname)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     $6, %ebx
+    mov     %esp, %ecx
+    addl    $16, %ecx
+    movl    $__NR_socketcall, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(getsockname)
+
+ENTRY(getpeername)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     $7, %ebx
+    mov     %esp, %ecx
+    addl    $16, %ecx
+    movl    $__NR_socketcall, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(getpeername)
+
+ENTRY(socketpair)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     $8, %ebx
+    mov     %esp, %ecx
+    addl    $16, %ecx
+    movl    $__NR_socketcall, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(socketpair)
+
+ENTRY(sendto)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     $11, %ebx
+    mov     %esp, %ecx
+    addl    $16, %ecx
+    movl    $__NR_socketcall, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(sendto)
+
+ENTRY(recvfrom)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     $12, %ebx
+    mov     %esp, %ecx
+    addl    $16, %ecx
+    movl    $__NR_socketcall, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(recvfrom)
+
+ENTRY(shutdown)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     $13, %ebx
+    mov     %esp, %ecx
+    addl    $16, %ecx
+    movl    $__NR_socketcall, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(shutdown)
+
+ENTRY(setsockopt)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     $14, %ebx
+    mov     %esp, %ecx
+    addl    $16, %ecx
+    movl    $__NR_socketcall, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(setsockopt)
+
+ENTRY(getsockopt)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     $15, %ebx
+    mov     %esp, %ecx
+    addl    $16, %ecx
+    movl    $__NR_socketcall, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(getsockopt)
+
+ENTRY(sendmsg)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     $16, %ebx
+    mov     %esp, %ecx
+    addl    $16, %ecx
+    movl    $__NR_socketcall, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(sendmsg)
+
+ENTRY(recvmsg)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     $17, %ebx
+    mov     %esp, %ecx
+    addl    $16, %ecx
+    movl    $__NR_socketcall, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(recvmsg)
+
+ENTRY(__accept4)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     $18, %ebx
+    mov     %esp, %ecx
+    addl    $16, %ecx
+    movl    $__NR_socketcall, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__accept4)
+
+ENTRY(recvmmsg)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     $19, %ebx
+    mov     %esp, %ecx
+    addl    $16, %ecx
+    movl    $__NR_socketcall, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(recvmmsg)
+
+ENTRY(sendmmsg)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     $20, %ebx
+    mov     %esp, %ecx
+    addl    $16, %ecx
+    movl    $__NR_socketcall, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(sendmmsg)
+
+ENTRY(sched_setscheduler)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_sched_setscheduler, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(sched_setscheduler)
+
+ENTRY(sched_getscheduler)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_sched_getscheduler, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(sched_getscheduler)
+
+ENTRY(sched_yield)
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    movl    $__NR_sched_yield, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    ret
+END(sched_yield)
+
+ENTRY(sched_setparam)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_sched_setparam, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(sched_setparam)
+
+ENTRY(sched_getparam)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_sched_getparam, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(sched_getparam)
+
+ENTRY(sched_get_priority_max)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_sched_get_priority_max, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(sched_get_priority_max)
+
+ENTRY(sched_get_priority_min)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_sched_get_priority_min, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(sched_get_priority_min)
+
+ENTRY(sched_rr_get_interval)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_sched_rr_get_interval, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(sched_rr_get_interval)
+
+ENTRY(sched_setaffinity)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_sched_setaffinity, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(sched_setaffinity)
+
+ENTRY(setns)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_setns, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(setns)
+
+ENTRY(unshare)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_unshare, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(unshare)
+
+ENTRY(__sched_getaffinity)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_sched_getaffinity, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__sched_getaffinity)
+
+ENTRY(__getcpu)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_getcpu, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__getcpu)
+
+ENTRY(uname)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_uname, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(uname)
+
+ENTRY(umask)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_umask, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(umask)
+
+ENTRY(__reboot)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_reboot, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__reboot)
+
+ENTRY(init_module)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_init_module, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(init_module)
+
+ENTRY(delete_module)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_delete_module, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(delete_module)
+
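+/* klogctl() is the libc name for the kernel's syslog(2) call, hence __NR_syslog below. */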
+ENTRY(klogctl)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_syslog, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(klogctl)
+
+ENTRY(sysinfo)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_sysinfo, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(sysinfo)
+
+ENTRY(personality)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_personality, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(personality)
+
+ENTRY(tee)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_tee, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(tee)
+
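+/*
+ * Six-argument calls such as splice pass their last argument in %ebp, so
+ * these stubs save and restore all six argument registers around the call.
+ */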
+ENTRY(splice)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+    pushl   %ebp
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ebp, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     32(%esp), %ebx
+    mov     36(%esp), %ecx
+    mov     40(%esp), %edx
+    mov     44(%esp), %esi
+    mov     48(%esp), %edi
+    mov     52(%esp), %ebp
+    movl    $__NR_splice, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebp
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(splice)
+
+ENTRY(vmsplice)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_vmsplice, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(vmsplice)
+
+ENTRY(epoll_create1)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_epoll_create1, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(epoll_create1)
+
+ENTRY(epoll_ctl)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_epoll_ctl, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(epoll_ctl)
+
+ENTRY(__epoll_pwait)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+    pushl   %ebp
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ebp, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     32(%esp), %ebx
+    mov     36(%esp), %ecx
+    mov     40(%esp), %edx
+    mov     44(%esp), %esi
+    mov     48(%esp), %edi
+    mov     52(%esp), %ebp
+    movl    $__NR_epoll_pwait, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebp
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__epoll_pwait)
+
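+/* eventfd() is generated from the eventfd2 system call, which also takes a flags argument. */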
+ENTRY(eventfd)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_eventfd2, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(eventfd)
+
+ENTRY(_exit)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_exit_group, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(_exit)
+
+ALIAS_SYMBOL(_Exit, _exit)
+
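+/*
+ * _exit()/_Exit() above use exit_group to terminate the whole process;
+ * __exit wraps the plain exit call, which ends only the calling thread.
+ */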
+ENTRY(__exit)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_exit, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(__exit)
+
+ENTRY(inotify_init1)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_inotify_init1, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(inotify_init1)
+
+ENTRY(inotify_add_watch)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_inotify_add_watch, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(inotify_add_watch)
+
+ENTRY(inotify_rm_watch)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_inotify_rm_watch, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(inotify_rm_watch)
+
+ENTRY(__pselect6)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+    pushl   %ebp
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ebp, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     32(%esp), %ebx
+    mov     36(%esp), %ecx
+    mov     40(%esp), %edx
+    mov     44(%esp), %esi
+    mov     48(%esp), %edi
+    mov     52(%esp), %ebp
+    movl    $__NR_pselect6, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebp
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__pselect6)
+
+ENTRY(__ppoll)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     28(%esp), %ebx
+    mov     32(%esp), %ecx
+    mov     36(%esp), %edx
+    mov     40(%esp), %esi
+    mov     44(%esp), %edi
+    movl    $__NR_ppoll, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__ppoll)
+
+ENTRY(process_vm_readv)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+    pushl   %ebp
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ebp, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     32(%esp), %ebx
+    mov     36(%esp), %ecx
+    mov     40(%esp), %edx
+    mov     44(%esp), %esi
+    mov     48(%esp), %edi
+    mov     52(%esp), %ebp
+    movl    $__NR_process_vm_readv, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebp
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(process_vm_readv)
+
+ENTRY(process_vm_writev)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+    pushl   %ebp
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ebp, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     32(%esp), %ebx
+    mov     36(%esp), %ecx
+    mov     40(%esp), %edx
+    mov     44(%esp), %esi
+    mov     48(%esp), %edi
+    mov     52(%esp), %ebp
+    movl    $__NR_process_vm_writev, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebp
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(process_vm_writev)
+
+ENTRY(quotactl)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_quotactl, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(quotactl)
+
+ENTRY(__set_tid_address)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_set_tid_address, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(__set_tid_address)
+
+ENTRY(setfsgid)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_setfsgid, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(setfsgid)
+
+ENTRY(setfsuid)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_setfsuid, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(setfsuid)
+
+ENTRY(setdomainname)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_setdomainname, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(setdomainname)
+
+ENTRY(sethostname)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_sethostname, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(sethostname)
+
+ENTRY(__sync_file_range)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+    pushl   %ebp
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ebp, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     32(%esp), %ebx
+    mov     36(%esp), %ecx
+    mov     40(%esp), %edx
+    mov     44(%esp), %esi
+    mov     48(%esp), %edi
+    mov     52(%esp), %ebp
+    movl    $__NR_sync_file_range, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebp
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__sync_file_range)
+
+ENTRY(wait4)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     24(%esp), %ebx
+    mov     28(%esp), %ecx
+    mov     32(%esp), %edx
+    mov     36(%esp), %esi
+    movl    $__NR_wait4, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(wait4)
+
+ENTRY(__waitid)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+    pushl   %esi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset esi, 0
+    pushl   %edi
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edi, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     28(%esp), %ebx
+    mov     32(%esp), %ecx
+    mov     36(%esp), %edx
+    mov     40(%esp), %esi
+    mov     44(%esp), %edi
+    movl    $__NR_waitid, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edi
+    popl    %esi
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__waitid)
+
+ENTRY(__set_thread_area)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     12(%esp), %ebx
+    movl    $__NR_set_thread_area, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ebx
+    ret
+END(__set_thread_area)
+
+ENTRY(__clock_getres)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_clock_getres, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__clock_getres)
+
+ENTRY(__clock_gettime)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_clock_gettime, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__clock_gettime)
+
+ENTRY(__gettimeofday)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     16(%esp), %ebx
+    mov     20(%esp), %ecx
+    movl    $__NR_gettimeofday, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %ecx
+    popl    %ebx
+    ret
+END(__gettimeofday)
+
+ENTRY(getrandom)
+    pushl   %ebx
+    .cfi_def_cfa_offset 8
+    .cfi_rel_offset ebx, 0
+    pushl   %ecx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset ecx, 0
+    pushl   %edx
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset edx, 0
+
+    call    __kernel_syscall
+    pushl   %eax
+    .cfi_adjust_cfa_offset 4
+    .cfi_rel_offset eax, 0
+
+    mov     20(%esp), %ebx
+    mov     24(%esp), %ecx
+    mov     28(%esp), %edx
+    movl    $__NR_getrandom, %eax
+    call    *(%esp)
+    addl    $4, %esp
+
+    cmpl    $-MAX_ERRNO, %eax
+    jb      1f
+    negl    %eax
+    pushl   %eax
+    call    __set_errno_internal
+    addl    $4, %esp
+1:
+    popl    %edx
+    popl    %ecx
+    popl    %ebx
+    ret
+END(getrandom)