diff --git a/third_party/libjpeg/jdhuff.c b/third_party/libjpeg/jdhuff.c
index b5ba39f736a7..13dd0f93dc99 100644
--- a/third_party/libjpeg/jdhuff.c
+++ b/third_party/libjpeg/jdhuff.c
@@ -435,26 +435,22 @@ jpeg_huff_decode (bitread_working_state * state,
 /*
  * Figure F.12: extend sign bit.
- * On some machines, a shift and add will be faster than a table lookup.
+ * On some machines, a shift and sub will be faster than a table lookup.
  */
 
 #ifdef AVOID_TABLES
 
-#define HUFF_EXTEND(x,s) ((x) < (1<<((s)-1)) ? (x) + (((-1)<<(s)) + 1) : (x))
+#define BIT_MASK(nbits) ((1<<(nbits))-1)
+#define HUFF_EXTEND(x,s) ((x) < (1<<((s)-1)) ? (x) - ((1<<(s))-1) : (x))
 
 #else
 
-#define HUFF_EXTEND(x,s) ((x) < extend_test[s] ? (x) + extend_offset[s] : (x))
-
-static const int extend_test[16] = /* entry n is 2**(n-1) */
-  { 0, 0x0001, 0x0002, 0x0004, 0x0008, 0x0010, 0x0020, 0x0040, 0x0080,
-    0x0100, 0x0200, 0x0400, 0x0800, 0x1000, 0x2000, 0x4000 };
+#define BIT_MASK(nbits) bmask[nbits]
+#define HUFF_EXTEND(x,s) ((x) <= bmask[(s) - 1] ? (x) - bmask[s] : (x))
 
-static const int extend_offset[16] = /* entry n is (-1 << n) + 1 */
-  { 0, ((-1)<<1) + 1, ((-1)<<2) + 1, ((-1)<<3) + 1, ((-1)<<4) + 1,
-    ((-1)<<5) + 1, ((-1)<<6) + 1, ((-1)<<7) + 1, ((-1)<<8) + 1,
-    ((-1)<<9) + 1, ((-1)<<10) + 1, ((-1)<<11) + 1, ((-1)<<12) + 1,
-    ((-1)<<13) + 1, ((-1)<<14) + 1, ((-1)<<15) + 1 };
+static const int bmask[16] = /* bmask[n] is mask for n rightmost bits */
+  { 0, 0x0001, 0x0003, 0x0007, 0x000F, 0x001F, 0x003F, 0x007F, 0x00FF,
+    0x01FF, 0x03FF, 0x07FF, 0x0FFF, 0x1FFF, 0x3FFF, 0x7FFF };
 
 #endif /* AVOID_TABLES */
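For reference, HUFF_EXTEND implements the EXTEND procedure of JPEG (ITU-T T.81) Figure F.12: a received s-bit value x encodes a negative coefficient when x < 2^(s-1), in which case 2^s - 1 is subtracted. The old AVOID_TABLES form added (((-1)<<(s)) + 1), which left-shifts a negative constant (undefined behavior in C99 and later); the new form subtracts ((1<<(s))-1) directly, and the table form tests x <= bmask[(s)-1] = 2^(s-1) - 1, which is the same condition as x < 2^(s-1). A standalone sketch (not part of the patch; file and macro names are illustrative) checking that the two new variants agree for every code length and bit pattern:

/* huff_extend_check.c -- standalone sketch, not part of the patch.
 * Verifies that the two new HUFF_EXTEND variants agree for every
 * code length s = 1..15 and every raw s-bit value x. */
#include <assert.h>
#include <stdio.h>

static const int bmask[16] =
  { 0, 0x0001, 0x0003, 0x0007, 0x000F, 0x001F, 0x003F, 0x007F, 0x00FF,
    0x01FF, 0x03FF, 0x07FF, 0x0FFF, 0x1FFF, 0x3FFF, 0x7FFF };

/* The two new variants from the patch, under illustrative names. */
#define HUFF_EXTEND_SHIFT(x,s) ((x) < (1<<((s)-1)) ? (x) - ((1<<(s))-1) : (x))
#define HUFF_EXTEND_TABLE(x,s) ((x) <= bmask[(s) - 1] ? (x) - bmask[s] : (x))

int main(void)
{
  for (int s = 1; s < 16; s++)
    for (int x = 0; x < (1 << s); x++)
      assert(HUFF_EXTEND_SHIFT(x, s) == HUFF_EXTEND_TABLE(x, s));
  printf("variants agree; e.g. HUFF_EXTEND(0x5, 4) = %d\n",
         HUFF_EXTEND_TABLE(0x5, 4));  /* 5 <= bmask[3] -> 5 - 15 = -10 */
  return 0;
}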
diff --git a/third_party/libjpeg/jdphuff.c b/third_party/libjpeg/jdphuff.c
index 22678099451a..32df3af76707 100644
--- a/third_party/libjpeg/jdphuff.c
+++ b/third_party/libjpeg/jdphuff.c
@@ -195,26 +195,22 @@ start_pass_phuff_decoder (j_decompress_ptr cinfo)
 /*
  * Figure F.12: extend sign bit.
- * On some machines, a shift and add will be faster than a table lookup.
+ * On some machines, a shift and sub will be faster than a table lookup.
  */
 
 #ifdef AVOID_TABLES
 
-#define HUFF_EXTEND(x,s) ((x) < (1<<((s)-1)) ? (x) + (((-1)<<(s)) + 1) : (x))
+#define BIT_MASK(nbits) ((1<<(nbits))-1)
+#define HUFF_EXTEND(x,s) ((x) < (1<<((s)-1)) ? (x) - ((1<<(s))-1) : (x))
 
 #else
 
-#define HUFF_EXTEND(x,s) ((x) < extend_test[s] ? (x) + extend_offset[s] : (x))
+#define BIT_MASK(nbits) bmask[nbits]
+#define HUFF_EXTEND(x,s) ((x) <= bmask[(s) - 1] ? (x) - bmask[s] : (x))
 
-static const int extend_test[16] = /* entry n is 2**(n-1) */
-  { 0, 0x0001, 0x0002, 0x0004, 0x0008, 0x0010, 0x0020, 0x0040, 0x0080,
-    0x0100, 0x0200, 0x0400, 0x0800, 0x1000, 0x2000, 0x4000 };
-
-static const int extend_offset[16] = /* entry n is (-1 << n) + 1 */
-  { 0, ((-1)<<1) + 1, ((-1)<<2) + 1, ((-1)<<3) + 1, ((-1)<<4) + 1,
-    ((-1)<<5) + 1, ((-1)<<6) + 1, ((-1)<<7) + 1, ((-1)<<8) + 1,
-    ((-1)<<9) + 1, ((-1)<<10) + 1, ((-1)<<11) + 1, ((-1)<<12) + 1,
-    ((-1)<<13) + 1, ((-1)<<14) + 1, ((-1)<<15) + 1 };
+static const int bmask[16] = /* bmask[n] is mask for n rightmost bits */
+  { 0, 0x0001, 0x0003, 0x0007, 0x000F, 0x001F, 0x003F, 0x007F, 0x00FF,
+    0x01FF, 0x03FF, 0x07FF, 0x0FFF, 0x1FFF, 0x3FFF, 0x7FFF };
 
 #endif /* AVOID_TABLES */
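The jdphuff.c hunk makes the identical substitution for the progressive decoder. As a usage illustration, BIT_MASK(n) yields the mask of the n rightmost bits; a minimal standalone sketch (assumed usage for illustration, not code from either file) prints the full extension table for a 4-bit magnitude category, with BIT_MASK(4) as the loop bound:

/* bit_mask_demo.c -- illustrative sketch only, not part of the patch. */
#include <stdio.h>

static const int bmask[16] =
  { 0, 0x0001, 0x0003, 0x0007, 0x000F, 0x001F, 0x003F, 0x007F, 0x00FF,
    0x01FF, 0x03FF, 0x07FF, 0x0FFF, 0x1FFF, 0x3FFF, 0x7FFF };
#define BIT_MASK(nbits) bmask[nbits]
#define HUFF_EXTEND(x,s) ((x) <= bmask[(s) - 1] ? (x) - bmask[s] : (x))

int main(void)
{
  /* A 4-bit magnitude category: BIT_MASK(4) = 0x000F = 15. */
  for (int x = 0; x <= BIT_MASK(4); x++)
    printf("s=4 x=%2d -> %3d\n", x, HUFF_EXTEND(x, 4));
  return 0;
}

Raw values 0..7 (top bit clear) extend to -15..-8, while 8..15 are already non-negative and pass through unchanged, matching Figure F.12.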