Remove redundant case_Lisp_Int macro

The case_Lisp_Int macro was originally introduced with different
definitions depending on USE_2_TAGS_FOR_INTS.  However, since commit
2b57012478, we have assumed that USE_2_TAGS_FOR_INTS is always
defined, and the macro has only a single definition.  As a result, the
macro is now unnecessary, and replacing it with standard C case labels
improves readability and understanding.

* src/lisp.h (case_Lisp_Int): Delete macro.
* src/alloc.c (process_mark_stack, survives_gc_p):
* src/data.c (Fcl_type_of):
* src/fns.c (value_cmp, sxhash_obj):
* src/pdumper.c (dump_object):
* src/print.c (print_object):
* src/xfaces.c (face_attr_equal_p): Remove uses of above macro.
Author: Stefan Kangas
Date: 2025-01-20 01:55:19 +01:00
Parent commit: dcccb92562
This commit: 278d1994af
7 changed files with 16 additions and 9 deletions

View file

@@ -7383,7 +7383,8 @@ process_mark_stack (ptrdiff_t base_sp)
break;
}
case_Lisp_Int:
case Lisp_Int0:
case Lisp_Int1:
break;
default:
@@ -7437,7 +7438,8 @@ survives_gc_p (Lisp_Object obj)
switch (XTYPE (obj))
{
case_Lisp_Int:
case Lisp_Int0:
case Lisp_Int1:
survives_p = true;
break;

View file

@@ -209,7 +209,8 @@ a fixed set of types. */)
{
switch (XTYPE (object))
{
case_Lisp_Int:
case Lisp_Int0:
case Lisp_Int1:
return Qfixnum;
case Lisp_Symbol:

View file

@@ -3069,7 +3069,8 @@ value_cmp (Lisp_Object a, Lisp_Object b, int maxdepth)
switch (XTYPE (a))
{
case_Lisp_Int:
case Lisp_Int0:
case Lisp_Int1:
{
EMACS_INT ia = XFIXNUM (a);
if (FIXNUMP (b))
@@ -5522,7 +5523,8 @@ sxhash_obj (Lisp_Object obj, int depth)
switch (XTYPE (obj))
{
case_Lisp_Int:
case Lisp_Int0:
case Lisp_Int1:
return XUFIXNUM (obj);
case Lisp_Symbol:

View file

@@ -468,7 +468,6 @@ typedef EMACS_INT Lisp_Word;
/* Fixnums use 2 tags, to give them one extra bit, thus
extending their range from, e.g., -2^28..2^28-1 to -2^29..2^29-1. */
#define INTMASK (EMACS_INT_MAX >> (INTTYPEBITS - 1))
#define case_Lisp_Int case Lisp_Int0: case Lisp_Int1
/* Idea stolen from GDB. Pedantic GCC complains about enum bitfields,
and xlc and Oracle Studio c99 complain vociferously about them. */

View file

@@ -3238,7 +3238,8 @@ dump_object (struct dump_context *ctx, Lisp_Object object)
case Lisp_Float:
offset = dump_float (ctx, XFLOAT (object));
break;
case_Lisp_Int:
case Lisp_Int0:
case Lisp_Int1:
eassert ("should not be dumping int: is self-representing" && 0);
abort ();
default:

View file

@@ -2291,7 +2291,8 @@ print_object (Lisp_Object obj, Lisp_Object printcharfun, bool escapeflag)
switch (XTYPE (obj))
{
case_Lisp_Int:
case Lisp_Int0:
case Lisp_Int1:
{
EMACS_INT i = XFIXNUM (obj);
char escaped_name;

View file

@@ -4425,7 +4425,8 @@ face_attr_equal_p (Lisp_Object v1, Lisp_Object v2)
return memcmp (SDATA (v1), SDATA (v2), SBYTES (v1)) == 0;
case_Lisp_Int:
case Lisp_Int0:
case Lisp_Int1:
case Lisp_Symbol:
return false;