[FFmpeg-devel] [RFC v5] libavcodec: add a native Daala decoder
Rostislav Pehlivanov
atomnuker at gmail.com
Sat Jan 2 18:56:40 CET 2016
This commit adds an experimental native Daala decoder.
Changes from the previous version:
- Proper preservation of the copyright notices and of the BSD 2-clause (ISC)
license, as per the guidelines of the Software Freedom Law Center:
http://www.softwarefreedom.org/resources/2007/gpl-non-gpl-collaboration.html
- Fixed crashes introduced in the previous RFC revision.
- Flagged the decoder as experimental.
A reminder that, for experimental decoders, the -strict -2 option has to be
placed before the input file so that it applies to the decoder.
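For example (the sample file name below is only a placeholder):

    ffmpeg -strict -2 -i daala_sample.ogv out.y4m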
Signed-off-by: Rostislav Pehlivanov <atomnuker at gmail.com>
---
configure | 1 +
libavcodec/Makefile | 2 +
libavcodec/allcodecs.c | 2 +
libavcodec/daala.h | 103 +++
libavcodec/daala_entropy.h | 464 +++++++++++
libavcodec/daala_parser.c | 50 ++
libavcodec/daala_pvq.h | 491 ++++++++++++
libavcodec/daala_utils.h | 240 ++++++
libavcodec/daaladec.c | 824 +++++++++++++++++++
libavcodec/daaladsp.c | 1890 ++++++++++++++++++++++++++++++++++++++++++++
libavcodec/daaladsp.h | 106 +++
libavcodec/daalatab.c | 1570 ++++++++++++++++++++++++++++++++++++
libavcodec/daalatab.h | 108 +++
13 files changed, 5851 insertions(+)
create mode 100644 libavcodec/daala.h
create mode 100644 libavcodec/daala_entropy.h
create mode 100644 libavcodec/daala_parser.c
create mode 100644 libavcodec/daala_pvq.h
create mode 100644 libavcodec/daala_utils.h
create mode 100644 libavcodec/daaladec.c
create mode 100644 libavcodec/daaladsp.c
create mode 100644 libavcodec/daaladsp.h
create mode 100644 libavcodec/daalatab.c
create mode 100644 libavcodec/daalatab.h
diff --git a/configure b/configure
index da74ccd..0fff96f 100755
--- a/configure
+++ b/configure
@@ -2277,6 +2277,7 @@ comfortnoise_encoder_select="lpc"
cook_decoder_select="audiodsp mdct sinewin"
cscd_decoder_select="lzo"
cscd_decoder_suggest="zlib"
+daala_decoder_select="daala_parser"
dca_decoder_select="fmtconvert mdct"
dds_decoder_select="texturedsp"
dirac_decoder_select="dirac_parse dwt golomb videodsp mpegvideoenc"
diff --git a/libavcodec/Makefile b/libavcodec/Makefile
index a18ca5b..5e4ab1a 100644
--- a/libavcodec/Makefile
+++ b/libavcodec/Makefile
@@ -221,6 +221,7 @@ OBJS-$(CONFIG_COMFORTNOISE_ENCODER) += cngenc.o
OBJS-$(CONFIG_CPIA_DECODER) += cpia.o
OBJS-$(CONFIG_CSCD_DECODER) += cscd.o
OBJS-$(CONFIG_CYUV_DECODER) += cyuv.o
+OBJS-$(CONFIG_DAALA_DECODER) += daaladec.o daalatab.o daaladsp.o
OBJS-$(CONFIG_DCA_DECODER) += dcadec.o dca.o dcadsp.o \
dcadata.o dca_exss.o \
dca_xll.o synth_filter.o
@@ -864,6 +865,7 @@ OBJS-$(CONFIG_BMP_PARSER) += bmp_parser.o
OBJS-$(CONFIG_CAVSVIDEO_PARSER) += cavs_parser.o
OBJS-$(CONFIG_COOK_PARSER) += cook_parser.o
OBJS-$(CONFIG_DCA_PARSER) += dca_parser.o dca.o
+OBJS-$(CONFIG_DAALA_PARSER) += daala_parser.o
OBJS-$(CONFIG_DIRAC_PARSER) += dirac_parser.o
OBJS-$(CONFIG_DNXHD_PARSER) += dnxhd_parser.o
OBJS-$(CONFIG_DPX_PARSER) += dpx_parser.o
diff --git a/libavcodec/allcodecs.c b/libavcodec/allcodecs.c
index 4eeb6f3..804c076 100644
--- a/libavcodec/allcodecs.c
+++ b/libavcodec/allcodecs.c
@@ -154,6 +154,7 @@ void avcodec_register_all(void)
REGISTER_DECODER(CPIA, cpia);
REGISTER_DECODER(CSCD, cscd);
REGISTER_DECODER(CYUV, cyuv);
+ REGISTER_DECODER(DAALA, daala);
REGISTER_DECODER(DDS, dds);
REGISTER_DECODER(DFA, dfa);
REGISTER_DECODER(DIRAC, dirac);
@@ -618,6 +619,7 @@ void avcodec_register_all(void)
REGISTER_PARSER(CAVSVIDEO, cavsvideo);
REGISTER_PARSER(COOK, cook);
REGISTER_PARSER(DCA, dca);
+ REGISTER_PARSER(DAALA, daala);
REGISTER_PARSER(DIRAC, dirac);
REGISTER_PARSER(DNXHD, dnxhd);
REGISTER_PARSER(DPX, dpx);
diff --git a/libavcodec/daala.h b/libavcodec/daala.h
new file mode 100644
index 0000000..60acdaf
--- /dev/null
+++ b/libavcodec/daala.h
@@ -0,0 +1,103 @@
+/*
+ * Daala video decoder
+ *
+ * Copyright (C) 2015-2016 Rostislav Pehlivanov <atomnuker at gmail.com>
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * This file incorporates work covered by the following copyright and
+ * permission notice:
+ *
+ * Copyright 2001-2015 Xiph.Org and contributors.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+ * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef AVCODEC_DAALA_H
+#define AVCODEC_DAALA_H
+
+#include "avcodec.h"
+
+/* Essential typedefs */
+typedef uint32_t ent_win; /* Must be able to hold 32-bit unsigned values */
+typedef uint16_t ent_rng;
+typedef int32_t dctcoef;
+
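+/* 1-based index of the highest set bit; matches Daala's ilog() for x > 0 */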
+#define daalaent_log2(x) (int)(1 + ff_log2(x))
+
+/* Block sizes */
+enum DaalaBsize {
+ DAALA_BLOCKSIZE_4x4 = 0,
+ DAALA_BLOCKSIZE_8x8,
+ DAALA_BLOCKSIZE_16x16,
+ DAALA_BLOCKSIZE_32x32,
+ DAALA_BLOCKSIZE_64x64,
+
+ DAALA_NBSIZES
+};
+
+#define DAALA_MAX_REF_FRAMES 2 /* Maximum number of reference frames */
+#define DAALA_MAX_PLANES 4 /* Redundant but makes loops more descriptive */
+#define DAALA_LOG_BSIZE0 2
+#define DAALA_LOG_BSIZE_MAX (DAALA_LOG_BSIZE0 + DAALA_NBSIZES - 1)
+#define DAALA_BSIZE_MAX (1 << DAALA_LOG_BSIZE_MAX)
+#define DAALA_BSIZE_GRID (1 << (DAALA_NBSIZES - 2))
+#define DAALA_QM_SIZE (DAALA_NBSIZES*(DAALA_NBSIZES + 1))
+#define DAALA_CSHIFT 4
+#define DAALA_PCLIP_FPR ((1 << (8 + DAALA_CSHIFT)) - 1)
+
+#define DAALA_QM_SCALE (1 << 15)
+#define DAALA_QM_SCALE_MAX (DAALA_QM_SCALE - 1)
+#define DAALA_QM_SCALE_UNIT (1.0f/DAALA_QM_SCALE_MAX)
+#define DAALA_QM_INV_SCALE (1 << 12)
+#define DAALA_QM_INV_SCALE_UNIT (1.0f/DAALA_QM_INV_SCALE)
+#define DAALA_QM_BSIZE (DAALA_BSIZE_MAX*DAALA_BSIZE_MAX)
+#define DAALA_QM_BUFFER_SIZE (DAALA_NBSIZES*2*DAALA_QM_BSIZE)
+
+typedef struct DaalaBitstreamHeader {
+ uint8_t key_frame;
+ uint8_t bipred;
+ uint8_t ref_num;
+ uint8_t act_mask;
+ uint8_t qm;
+ uint8_t haar;
+ uint8_t golden;
+ uint8_t pvq_qm[DAALA_MAX_PLANES][DAALA_QM_SIZE];
+} DaalaBitstreamHeader;
+
+#endif /* AVCODEC_DAALA_H */
diff --git a/libavcodec/daala_entropy.h b/libavcodec/daala_entropy.h
new file mode 100644
index 0000000..6658a80
--- /dev/null
+++ b/libavcodec/daala_entropy.h
@@ -0,0 +1,464 @@
+/*
+ * Daala video decoder
+ *
+ * Copyright (C) 2015-2016 Rostislav Pehlivanov <atomnuker at gmail.com>
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * This file incorporates work covered by the following copyright and
+ * permission notice:
+ *
+ * Copyright 2001-2015 Xiph.Org and contributors.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+ * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef AVCODEC_DAALAENTROPY_H
+#define AVCODEC_DAALAENTROPY_H
+
+#include "libavutil/avassert.h"
+
+#include "daala.h"
+#include "daalatab.h"
+
+#define DAALAENT_UINT_BITS 4
+#define DAALAENT_MODEL_TAB 12
+#define DAALAENT_BIT_ABUNDANCE 16384
+#define DAALAENT_UNARY_MAX INT16_MAX
+#define DAALAENT_SAT(a,b) ((a) - FFMIN(a,b))
+#define DAALAENT_WSIZE (int)sizeof(ent_win)*CHAR_BIT
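+/* The per-size CDF tables appear to be packed back to back in
+ * ff_daalaent_cdf_tab; n*(n - 1)/2 - 1 is the triangular-number offset of
+ * the n-entry CDF (inferred from the access pattern). */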
+#define DAALAENT_CDF_ACCESS(n) (&ff_daalaent_cdf_tab[((n)*((n) - 1) >> 1) - 1])
+
+enum DaalaCDFDecodeType {
+ CDF_NORM = 0,
+ CDF_Q15,
+ CDF_UNSCALED,
+ CDF_DYADIC,
+};
+
+typedef struct DaalaCDF {
+ ent_rng *cdf;
+ int x, y, inc, inc_g, fir;
+ uint8_t gen_mod;
+} DaalaCDF;
+
+typedef struct DaalaEntropy {
+ const uint8_t *buf, *ebuf; /* Normal, ends before the raw bits start */
+ const uint8_t *rbuf, *erbuf; /* Raw, located at the end of the bitstream */
+ ent_rng range;
+ ent_win diff, end_window;
+ int16_t count;
+ int eos_offset, end_window_size;
+ uint8_t err;
+} DaalaEntropy;
+
+/* Expectation value log, outputs Q1 */
+static av_always_inline int daalaent_log_ex(int ex_q16)
+{
+ int o, log = daalaent_log2(ex_q16);
+ if (log < 15) {
+ o = ex_q16*ex_q16 > 2 << 2*log;
+ } else {
+ int tmp = ex_q16 >> (log - 8);
+ o = tmp*tmp > (1 << 15);
+ }
+ return FFMAX(0, 2*log - 33 + o);
+}
+
+/* Number of bits read */
+static av_always_inline int daalaent_bits_count(DaalaEntropy *e)
+{
+ return ((e->ebuf - e->erbuf) + (e->buf - e->rbuf))*8 - \
+ e->count - e->end_window_size + e->eos_offset;
+}
+
+/* Refills the entropy decoder's bit window from the buffer */
+static av_always_inline void daalaent_fillup(DaalaEntropy *e)
+{
+ int i = DAALAENT_WSIZE - 9 - (e->count + 15);
+ const uint8_t *ebuf = e->ebuf, *buf = e->buf;
+ for (; i >= 0 && buf < ebuf; i -= 8, buf++) {
+ av_assert0(i <= DAALAENT_WSIZE - 8);
+ e->diff |= (ent_win)buf[0] << i;
+ e->count += 8;
+ }
+ if (buf >= ebuf) {
+ e->eos_offset += DAALAENT_BIT_ABUNDANCE - e->count;
+ e->count = DAALAENT_BIT_ABUNDANCE;
+ }
+ e->ebuf = ebuf;
+ e->buf = buf;
+}
+
+/* Updates the generic exponential probability model */
+static av_always_inline void daalaent_exp_model_update(DaalaCDF *c, int *ex, int x,
+ int xs, int id, int integrate)
+{
+ int i, xenc;
+ ent_rng *cdf = &c->cdf[id*c->y];
+ if (cdf[15] + c->inc > 32767) {
+ for (i = 0; i < 16; i++)
+ cdf[i] = (cdf[i] >> 1) + i + 1;
+ }
+ xenc = FFMIN(15, xs);
+ for (i = xenc; i < 16; i++)
+ cdf[i] += c->inc;
+ x = FFMIN(x, 32767);
+ *ex += ((x << 16) - *ex) >> integrate;
+}
+
+
+/* Renormalizes */
+static inline void daalaent_renormalize(DaalaEntropy *e, uint32_t diff,
+ ent_rng range)
+{
+ int i = 16 - daalaent_log2(range);
+ e->diff = diff << i;
+ e->range = range << i;
+ if ((e->count -= i) < 0)
+ daalaent_fillup(e);
+}
+
+/* Decodes a bool from the bitstream, p ∈ (0, p_tot), p_tot ∈ [16384, 32768] */
+static inline uint8_t daalaent_decode_bool(DaalaEntropy *e, uint32_t p,
+ uint32_t p_tot)
+{
+ uint8_t rval;
+ long int diff = e->range - p_tot, tmp = diff >= p_tot;
+ av_assert0(e->diff >> (DAALAENT_WSIZE - 16) < e->range);
+ p <<= tmp;
+ p_tot <<= tmp;
+ tmp = DAALAENT_SAT(2*diff, p_tot);
+ tmp = p + FFMIN(p, tmp) + FFMIN(DAALAENT_SAT(p, tmp) >> 1, diff);
+ diff = tmp << (DAALAENT_WSIZE - 16);
+ rval = e->diff >= diff;
+ diff = e->diff - (rval ? diff : 0);
+ tmp = rval ? e->range - tmp : tmp;
+ daalaent_renormalize(e, diff, tmp);
+ return rval;
+}
+
+/* Decodes a symbol from a CDF table */
+static int daalaent_decode_cdf(DaalaEntropy *e, const ent_rng *cdf,
+ int cdf_size, uint32_t p_tot,
+ enum DaalaCDFDecodeType type)
+{
+ int d, lim, g, scale, ret = 0;
+ ent_rng range = e->range;
+ ent_win diff = e->diff, u = 0, v = 0;
+ const int cshift = DAALAENT_WSIZE - 16;
+ const int cval = diff >> cshift;
+ av_assert0(diff >> cshift < range); /* Probably the most important assert */
+ if (type == CDF_UNSCALED) {
+ p_tot = cdf[cdf_size - 1];
+ av_assert0(2 <= p_tot && p_tot <= 32768);
+ scale = 15 - daalaent_log2(p_tot - 1);
+ p_tot <<= scale;
+ av_assert0(p_tot <= range);
+ if (range - p_tot >= p_tot) {
+ p_tot <<= 1;
+ scale++;
+ }
+ d = range - p_tot;
+ } else if (type == CDF_Q15) {
+ av_assert0(cdf[cdf_size - 1] == 32768);
+ av_assert0(32768 <= range);
+ d = range - 32768;
+ p_tot = 32768;
+ scale = 0;
+ } else if (type == CDF_DYADIC) {
+ av_assert0(cdf[cdf_size - 1] == 1 << p_tot);
+ scale = 15 - p_tot;
+ av_assert0(32768 <= range);
+ d = range - 32768;
+ p_tot = 32768;
+ } else {
+ p_tot = cdf[cdf_size - 1];
+ av_assert0(16384 <= p_tot && p_tot <= 32768);
+ av_assert0(p_tot <= range);
+ scale = range - p_tot >= p_tot;
+ p_tot <<= scale;
+ d = range - p_tot;
+ }
+ g = DAALAENT_SAT(2*d, p_tot);
+ lim = FFMAX(FFMAX(cval >> 1, cval - d), (2*cval + 1 - g)/3) >> scale;
+ for (v = cdf[ret]; v <= lim; v = cdf[++ret])
+ u = v;
+ u <<= scale;
+ v <<= scale;
+ u = u + FFMIN(u, g) + FFMIN(DAALAENT_SAT(u, g) >> 1, d);
+ v = v + FFMIN(v, g) + FFMIN(DAALAENT_SAT(v, g) >> 1, d);
+ range = v - u;
+ diff -= u << cshift;
+ daalaent_renormalize(e, diff, range);
+ return ret;
+}
+
+/* Decodes raw bits from the bitstream, num ∈ [0, 25] */
+static inline ent_win daalaent_decode_bits(DaalaEntropy *e, int num)
+{
+ int avail = e->end_window_size;
+ ent_win ret, win = e->end_window;
+ if (avail < num) {
+ const uint8_t *erbuf = e->erbuf;
+ av_assert0(avail <= DAALAENT_WSIZE - 8);
+ do {
+ if (erbuf <= e->rbuf) {
+ e->eos_offset += DAALAENT_BIT_ABUNDANCE - avail;
+ avail = DAALAENT_BIT_ABUNDANCE;
+ break;
+ }
+ win |= (ent_win)*--erbuf << avail;
+ avail += 8;
+ } while (avail <= DAALAENT_WSIZE - 8);
+ e->erbuf = erbuf;
+ }
+ ret = win & ((1 << num) - 1);
+ win >>= num;
+ avail -= num;
+ e->end_window = win;
+ e->end_window_size = avail;
+ return ret;
+}
+
+/* Gets a single bit if !!cond and returns ±1 */
+#define daalaent_cphase(e, c) (!!(c) ? (1 - 2*daalaent_decode_bits(e, 1)) : 1)
+
+/* "+derf | It was a hack for the screen coding wavelet tools." */
+/* "+jmspeex | Then you don't understand it." */
+static av_always_inline int daalaent_decode_unary(DaalaEntropy *e)
+{
+ int rval = 0;
+ for (; rval < DAALAENT_UNARY_MAX && !daalaent_decode_bits(e, 1); rval++);
+ return rval;
+}
+
+/* Decodes a uint from the bitstream, num ∈ [2, 2^29] */
+static inline ent_win daalaent_decode_uint(DaalaEntropy *e, ent_win num)
+{
+ av_assert0(num <= 1 << (25 + DAALAENT_UINT_BITS));
+ if (num > 1 << DAALAENT_UINT_BITS) {
+ int bit = daalaent_log2(--num) - DAALAENT_UINT_BITS;
+ int adr = (num >> bit) + 1;
+ ent_win t = daalaent_decode_cdf(e, DAALAENT_CDF_ACCESS(adr), adr,
+ 0, CDF_Q15);
+ t = t << bit | daalaent_decode_bits(e, bit);
+ if (t <= num)
+ return t;
+ e->err = 1;
+ return num;
+ }
+ return daalaent_decode_cdf(e, DAALAENT_CDF_ACCESS(num), num, 0, CDF_Q15);
+}
+
+static inline int daalaent_decode_cdf_adapt(DaalaEntropy *e, DaalaCDF *c,
+ int cdf_offset, int num)
+{
+ int i;
+ ent_rng *cdf = &c->cdf[cdf_offset*c->y];
+ const int rval = daalaent_decode_cdf(e, cdf, num, 0, CDF_UNSCALED);
+ if (cdf[num - 1] + c->inc > 32767) {
+ for (i = 0; i < num; i++)
+ cdf[i] = (cdf[i] >> 1) + i + 1;
+ }
+ for (i = rval; i < num; i++)
+ cdf[i] += c->inc;
+ return rval;
+}
+
+/* "Special laplace decoder" */
+static inline int daalaent_decode_laplace(DaalaEntropy *e, unsigned int decay,
+ int max)
+{
+ const ent_rng *cdf;
+ int pos, sym, max_shift, shift = 0, p_shift = 0;
+ if (!max)
+ return 0;
+ while (((max >> shift) >= 15 || max == -1) && decay > 235) {
+ decay = (decay*decay + 128) >> 8;
+ shift++;
+ }
+ max_shift = max >> shift;
+ decay = FFMAX(FFMIN(decay, 254), 2);
+ cdf = ff_daalaent_cdf_exp_tab[(decay + 1) >> 1];
+ do {
+ uint8_t shift_bound = max_shift > 0 && max_shift < 15;
+ int cdf_size = shift_bound ? max_shift + 1 : 16;
+ int cdf_type = shift_bound ? CDF_UNSCALED : CDF_Q15;
+ sym = daalaent_decode_cdf(e, cdf, cdf_size, 0, cdf_type);
+ p_shift += sym;
+ max_shift -= 15;
+ } while (sym >= 15 && max_shift);
+ pos = shift ? (p_shift << shift) + daalaent_decode_bits(e, shift) : p_shift;
+ av_assert0(pos >> shift <= max >> shift || max == -1);
+ if (max != -1 && pos > max) {
+ pos = max;
+ e->err = 1;
+ }
+ return pos;
+}
+
+/* Used by the vector and delta laplace decoding functions for PVQ */
+static inline int daalaent_decode_laplace_pvq(DaalaEntropy *e, unsigned int exp_v,
+ int max_mod_v)
+{
+ int sym = 0, lsb = 0;
+ const int shift = FFMAX(daalaent_log2(exp_v) - 11, 0);
+ const int ex = (exp_v + (1 << shift >> 1)) >> shift;
+ const int maxval = (max_mod_v + (1 << shift >> 1)) >> shift;
+ /* The reference decoder uses snake oil DIVU optimization here */
+ const int decay = FFMIN(254, 256*ex/(ex + 256));
+ const int offset = ff_daalaent_laplace_offset[(decay + 1) >> 1];
+ if (maxval) {
+ ent_rng i, cdf[16];
+ for (i = 0; i < 16; i++)
+ cdf[i] = ff_daalaent_cdf_exp_tab[(decay + 1) >> 1][i] - offset;
+ sym = daalaent_decode_cdf(e, cdf, FFMIN(maxval + 1, 16), 0, CDF_UNSCALED);
+ }
+ if (shift) {
+ if (shift - !sym > 0)
+ lsb = daalaent_decode_bits(e, shift - !sym);
+ lsb -= (!!sym << (shift - 1));
+ }
+ if (sym == 15) /* Tail */
+ sym += daalaent_decode_laplace(e, decay, maxval - 15);
+ return (sym << shift) + lsb;
+}
+
+/* Expectation value is in Q16 */
+static inline int daalaent_decode_generic(DaalaEntropy *e, DaalaCDF *c, int *ex,
+ int max, int integrate)
+{
+ int rval, lsb = 0, log_ex = daalaent_log_ex(*ex);
+ const int shift = FFMAX(0, (log_ex - 5) >> 1);
+ const int id = FFMIN(DAALAENT_MODEL_TAB - 1, log_ex);
+ const int ms = (max + (1 << shift >> 1)) >> shift;
+ int xs = (max == -1) ? 16 : FFMIN(ms + 1, 16);
+ ent_rng *cdf = &c->cdf[id*c->y];
+ if (!max)
+ return 0;
+ if ((xs = daalaent_decode_cdf(e, cdf, xs, 0, CDF_UNSCALED)) == 15) {
+ int g = ((2*(*ex) >> 8) + (1 << shift >> 1)) >> shift;
+ ent_win decay = FFMAX(2, FFMIN(254, 256*g/(g + 256)));
+ xs += daalaent_decode_laplace(e, decay, (max == -1) ? -1 : ms - 15);
+ }
+ if (shift) {
+ if (shift > !xs)
+ lsb = daalaent_decode_bits(e, shift - !xs);
+ lsb -= !!xs << (shift - 1);
+ }
+ rval = (xs << shift) + lsb;
+ daalaent_exp_model_update(c, ex, rval, xs, id, integrate);
+ return rval;
+}
+
+static inline void daalaent_decode_init(DaalaEntropy *e, const uint8_t *buf,
+ int buf_size)
+{
+ e->rbuf = buf;
+ e->erbuf = buf + buf_size;
+ e->buf = buf;
+ e->ebuf = buf + buf_size;
+ e->err = 0;
+ e->diff = 0;
+ e->range = 32768;
+ e->count = -15;
+ e->eos_offset = 18 - DAALAENT_WSIZE;
+ e->end_window = 0;
+ e->end_window_size = 0;
+ daalaent_fillup(e);
+}
+
+static av_always_inline void daalaent_cdf_reset(DaalaCDF *s)
+{
+ int i, j;
+ for (i = 0; i < s->x; i++)
+ for (j = 0; j < s->y; j++)
+ s->cdf[i*s->y + j] = s->inc_g*(j + s->gen_mod) + s->fir;
+}
+
+static inline int daalaent_cdf_alloc(DaalaCDF *s, int x, int y, int inc, int fir,
+ uint8_t inc_shift, uint8_t gen_mod)
+{
+ s->x = x;
+ s->y = y;
+ s->inc = inc;
+ s->gen_mod = gen_mod;
+ s->inc_g = s->inc >> inc_shift;
+ s->fir = !!fir || s->gen_mod ? fir : s->inc_g;
+ s->cdf = av_malloc(x*y*sizeof(ent_rng));
+ if (!s->cdf)
+ return 1;
+ return 0;
+}
+
+static av_always_inline void daalaent_cdf_free(DaalaCDF *s)
+{
+ if (!s)
+ return;
+ av_freep(&s->cdf);
+}
+
+/* Decodes the preamble at the start of every frame */
+static inline int daalaent_decode_frame_header(DaalaEntropy *e,
+ DaalaBitstreamHeader *h,
+ uint8_t planes)
+{
+ int i, j;
+ if (daalaent_decode_bool(e, 16384, 32768))
+ return 1;
+ h->key_frame = daalaent_decode_bool(e, 16384, 32768);
+ if (!h->key_frame) {
+ h->bipred = daalaent_decode_bool(e, 16384, 32768);
+ h->ref_num = 1 + daalaent_decode_uint(e, DAALA_MAX_REF_FRAMES);
+ } else {
+ h->ref_num = 0;
+ }
+ h->act_mask = daalaent_decode_bool(e, 16384, 32768);
+ h->qm = daalaent_decode_bool(e, 16384, 32768);
+ h->haar = daalaent_decode_bool(e, 16384, 32768);
+ h->golden = daalaent_decode_bool(e, 16384, 32768);
+ if (h->key_frame && planes) {
+ for (i = 0; i < planes; i++)
+ for (j = 0; j < DAALA_QM_SIZE; j++)
+ h->pvq_qm[i][j] = daalaent_decode_bits(e, 8);
+ }
+ return 0;
+}
+
+#endif /* AVCODEC_DAALAENTROPY_H */
diff --git a/libavcodec/daala_parser.c b/libavcodec/daala_parser.c
new file mode 100644
index 0000000..71b7ea1
--- /dev/null
+++ b/libavcodec/daala_parser.c
@@ -0,0 +1,50 @@
+/*
+ * Daala video decoder
+ *
+ * Copyright (C) 2015-2016 Rostislav Pehlivanov <atomnuker at gmail.com>
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include "parser.h"
+#include "daala_entropy.h"
+
+static int parse(AVCodecParserContext *s,
+ AVCodecContext *avctx,
+ const uint8_t **poutbuf, int *poutbuf_size,
+ const uint8_t *buf, int buf_size)
+{
+ DaalaEntropy e;
+ DaalaBitstreamHeader h;
+
+ daalaent_decode_init(&e, buf, buf_size);
+ daalaent_decode_frame_header(&e, &h, 0);
+
+ s->key_frame = h.key_frame;
+ s->pict_type = h.key_frame ? AV_PICTURE_TYPE_I : !h.bipred ?
+ AV_PICTURE_TYPE_P : AV_PICTURE_TYPE_B;
+
+ *poutbuf = buf;
+ *poutbuf_size = buf_size;
+
+ return buf_size;
+}
+
+AVCodecParser ff_daala_parser = {
+ .codec_ids = { AV_CODEC_ID_DAALA },
+ .parser_parse = parse,
+};
diff --git a/libavcodec/daala_pvq.h b/libavcodec/daala_pvq.h
new file mode 100644
index 0000000..23c833e
--- /dev/null
+++ b/libavcodec/daala_pvq.h
@@ -0,0 +1,491 @@
+/*
+ * Daala video decoder
+ *
+ * Copyright (C) 2015-2016 Rostislav Pehlivanov <atomnuker at gmail.com>
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * This file incorporates work covered by the following copyright and
+ * permission notice:
+ *
+ * Copyright 2001-2015 Xiph.Org and contributors.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+ * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef AVCODEC_DAALAPVQ_H
+#define AVCODEC_DAALAPVQ_H
+
+#include "daala.h"
+#include "daala_entropy.h"
+
+#define DAALAPVQ_NUM_ADAPTS 4
+
+#define DAALAPVQ_SKIP_ZERO 1
+#define DAALAPVQ_SKIP_COPY 2
+#define DAALAPVQ_PARTITIONS_MAX 9
+
+#define DAALAPVQ_COUNT 2
+#define DAALAPVQ_COUNT_EX 3
+#define DAALAPVQ_K 0
+#define DAALAPVQ_SUM_EX 1
+#define DAALAPVQ_NOVAL INT32_MIN
+
+#define DAALAPVQ_MAX_PART_SIZE (DAALA_QM_BSIZE/2)
+#define DAALAPVQ_COMPAND_SCALE (256 << DAALA_CSHIFT)
+#define DAALAPVQ_COMPAND_SCALE_1 (1.0f/DAALAPVQ_COMPAND_SCALE)
+
+/* PVQ Context struct */
+typedef struct DaalaPVQ {
+ uint8_t phase;
+ int nb_coeffs;
+ int size[DAALAPVQ_PARTITIONS_MAX];
+ int skip[DAALAPVQ_PARTITIONS_MAX];
+ int qmax[DAALAPVQ_PARTITIONS_MAX];
+ /* All of the above is set on every decode call to keep the argument lists short */
+
+ /* Everything below is reset on every frame */
+ DaalaCDF pvqcodeword_cdf;
+ DaalaCDF pvqskip_cdf;
+ DaalaCDF pvqtheta_gain_cdf;
+ DaalaCDF pvqgain_ref_mcdf;
+ DaalaCDF pvqgain_noref_mcdf;
+ DaalaCDF pvqtheta_mcdf;
+ int pvqgain_ex[DAALA_MAX_PLANES][DAALA_NBSIZES][DAALAPVQ_PARTITIONS_MAX];
+ int pvqtheta_ex[DAALA_NBSIZES*DAALAPVQ_PARTITIONS_MAX];
+ int pvqadapt[2*DAALAPVQ_NUM_ADAPTS*DAALA_NBSIZES];
+
+ int16_t qmatrix[DAALA_QM_BUFFER_SIZE];
+ int16_t qmatrix_inv[DAALA_NBSIZES*2*DAALA_QM_BSIZE];
+} DaalaPVQ;
+
+/* Index for packed quantization matrices */
+static av_always_inline int daalapvq_get_qm_idx(enum DaalaBsize bsize, int band)
+{
+ return bsize*bsize + bsize + band - band/3;
+}
+
+/* Computes theta quantization range */
+static av_always_inline int daalapvq_calc_theta_quant(double gain, double beta)
+{
+ if (gain < 1.4f)
+ return 1;
+ return lrint((gain*M_PI)/(2.0f*beta));
+}
+
+/* Dequantizes the theta angle */
+static av_always_inline double daalapvq_dequant_theta(int t, int max)
+{
+ if (!max)
+ return 0;
+ return FFMIN(t, max - 1)*0.5f*M_PI/max;
+}
+
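+/* Applies the Householder reflection defined by r to x in place:
+ * x -= (2*<r,x>/<r,r>)*r. Used during synthesis to map the decoded
+ * vector back around the reference direction. */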
+static inline void daalapvq_householder_a(double *x, const double *r, int n)
+{
+ int i;
+ double projection = 0.0f, e = 0.0f;
+ for (i = 0; i < n; i++) {
+ e += r[i]*r[i];
+ projection += r[i]*x[i];
+ }
+ projection *= 2.0f/(1e-100 + e);
+ for (i = 0; i < n; i++)
+ x[i] -= r[i]*projection;
+}
+
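+/* Builds the Householder reflector in place: picks the largest-magnitude
+ * component of r, adds sign(r[m])*gr to it and returns its index m. */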
+static inline int daalapvq_householder_c(double *r, int n, double gr, int *sign)
+{
+ int i, s, m = 0;
+ double maxr = 0.0f;
+ for (i = 0; i < n; i++) {
+ if (fabs(r[i]) > maxr) {
+ maxr = fabs(r[i]);
+ m = i;
+ }
+ }
+ s = r[m] > 0 ? 1 : -1;
+ r[m] += gr*s;
+ *sign = s;
+ return m;
+}
+
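+/* Undoes the interleaved gain coding: gains are coded by their distance
+ * from ref_gain, alternating around it, so x == 0 maps to ref_gain itself. */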
+static inline int daalapvq_decode_gain_interleaved(int x, int ref_gain)
+{
+ if (x < 2*ref_gain - 1) {
+ if (x & 1)
+ return ref_gain - 1 - (x >> 1);
+ else
+ return ref_gain + (x >> 1);
+ }
+ return x + 1;
+}
+
+static av_always_inline double daalapvq_gain_root(double g, int q0, double beta)
+{
+ if (beta == 1.0f)
+ return g/q0;
+ return DAALAPVQ_COMPAND_SCALE*pow(g*DAALAPVQ_COMPAND_SCALE_1, 1.0f/beta)/q0;
+}
+
+static inline double daalapvq_gain_raise(double cg, int q0, double beta)
+{
+ if (beta == 1.0f)
+ return cg*q0;
+ else if (beta == 1.5f) {
+ cg *= q0*DAALAPVQ_COMPAND_SCALE_1;
+ return DAALAPVQ_COMPAND_SCALE*cg*sqrt(cg);
+ }
+ return DAALAPVQ_COMPAND_SCALE*pow(cg*q0*DAALAPVQ_COMPAND_SCALE_1, beta);
+}
+
+static inline double daalapvq_compute_gain(dctcoef *x, int n, int q0, double *g,
+ double beta, const int16_t *qmatrix)
+{
+ int i;
+ double acc = 0.0f;
+ for (i = 0; i < n; i++) {
+ const double tmp = x[i]*(double)qmatrix[i]*DAALA_QM_SCALE_UNIT;
+ acc += tmp*tmp;
+ }
+ *g = sqrt(acc);
+ return daalapvq_gain_root(*g, q0, beta);
+}
+
+static inline int daalapvq_compute_k(double qcg, int itheta, double theta,
+ int has_ref, int n, double beta, int robust)
+{
+ if (!has_ref) {
+ if (!qcg)
+ return 0;
+ if (n == 15 && qcg == 1.0f && beta > 1.25f)
+ return 1;
+ else
+ return FFMAX(1, lrint((qcg - 0.2f)*sqrt((n+3)/2)/beta));
+ }
+ if (!itheta)
+ return 0;
+ if (robust)
+ return FFMAX(1, lrint((itheta - 0.2f)*sqrt((n + 2)/2)));
+ else
+ return FFMAX(1, lrint((qcg*sin(theta) - 0.2f)*sqrt((n + 2)/2)/beta));
+}
+
+
+static inline void daalapvq_synth(dctcoef *xcoeff, dctcoef *ypulse, dctcoef *ref,
+ int n, double gr, uint8_t ref_p, double gain,
+ double theta, const int16_t *qmatrix,
+ const int16_t *qmatrix_inv)
+{
+ int i, m, nn = n - ref_p, s = 0, yy = 0;
+ double scale, r[DAALAPVQ_MAX_PART_SIZE], x[DAALAPVQ_MAX_PART_SIZE];
+ if (ref_p) {
+ for (i = 0; i < n; i++)
+ r[i] = ref[i]*qmatrix[i]*DAALA_QM_SCALE_UNIT;
+ }
+ m = !ref_p ? 0 : daalapvq_householder_c(r, n, gr, &s);
+ for (i = 0; i < nn; i++)
+ yy += ypulse[i]*ypulse[i];
+ scale = !yy ? 0 : gain/sqrt(yy);
+ if (!ref_p) {
+ for (i = 0; i < n; i++)
+ xcoeff[i] = lrint((ypulse[i]*scale)*(qmatrix_inv[i]*DAALA_QM_INV_SCALE_UNIT));
+ } else {
+ scale *= sin(theta);
+ for (i = 0; i < m; i++)
+ x[i] = ypulse[i]*scale;
+ x[m] = -s*gain*cos(theta);
+ for (i = m; i < nn; i++)
+ x[i+1] = ypulse[i]*scale;
+ daalapvq_householder_a(x, r, n);
+ for (i = 0; i < n; i++)
+ xcoeff[i] = lrint(x[i]*qmatrix_inv[i]*DAALA_QM_INV_SCALE_UNIT);
+ }
+}
+
+static void daalapvq_decode_laplace_delta(DaalaEntropy *e, dctcoef *y,
+ int n, int k, dctcoef *curr,
+ const dctcoef *means)
+{
+ int i, k0 = k, k_left = k;
+ int prev = 0, sum_ex = 0, sum_c = 0, pos = 0;
+ int coef = 256*means[DAALAPVQ_COUNT]/(1 + means[DAALAPVQ_COUNT_EX]);
+ memset(y, 0, n*sizeof(dctcoef));
+ coef = FFMAX(coef, 1);
+ for (i = 0; i < k0; i++) {
+ int count;
+ if (!i) {
+ int decay;
+ int ex = coef*(n - prev)/k_left;
+ if (ex > 65280)
+ decay = 255;
+ else
+ decay = FFMIN(255, (int)((256*ex/(ex + 256) + (ex>>5)*ex/((n + 1)*(n - 1)*(n - 1)))));
+ count = daalaent_decode_laplace(e, decay, n - 1);
+ } else {
+ count = daalaent_decode_laplace_pvq(e, coef*(n - prev)/k_left, n - prev - 1);
+ }
+ sum_ex += 256*(n - prev);
+ sum_c += count*k_left;
+ pos += count;
+ av_assert0(pos < n);
+ y[pos] += daalaent_cphase(e, !y[pos]);
+ prev = pos;
+ k_left--;
+ if (!k_left)
+ break;
+ }
+ if (k > 0) {
+ curr[DAALAPVQ_COUNT] = 256*sum_c;
+ curr[DAALAPVQ_COUNT_EX] = sum_ex;
+ } else {
+ curr[DAALAPVQ_COUNT] = -1;
+ curr[DAALAPVQ_COUNT_EX] = 0;
+ }
+ curr[DAALAPVQ_SUM_EX] = curr[DAALAPVQ_K] = 0;
+}
+
+/* Decodes quantized coefficients from the bitstream */
+static void daalapvq_decode_laplace_vector(DaalaEntropy *e, dctcoef *y,
+ int n, int k, dctcoef *curr,
+ const dctcoef *means)
+{
+ int i, exp_q8, mean_k_q8, mean_sum_ex_q8, sum_ex = 0, kn = k, ran_delta = 0;
+ if (k <= 1) {
+ daalapvq_decode_laplace_delta(e, y, n, k, curr, means);
+ return;
+ }
+ if (!k) {
+ curr[DAALAPVQ_COUNT] = DAALAPVQ_NOVAL;
+ curr[DAALAPVQ_COUNT_EX] = DAALAPVQ_NOVAL;
+ curr[DAALAPVQ_K] = 0;
+ curr[DAALAPVQ_SUM_EX] = 0;
+ memset(y, 0, n*sizeof(dctcoef));
+ return;
+ }
+ mean_k_q8 = means[DAALAPVQ_K];
+ mean_sum_ex_q8 = means[DAALAPVQ_SUM_EX];
+ if (mean_k_q8 < 1 << 23)
+ exp_q8 = 256*mean_k_q8/(1 + mean_sum_ex_q8);
+ else
+ exp_q8 = mean_k_q8/(1 + (mean_sum_ex_q8 >> 8));
+ for (i = 0; i < n; i++) {
+ int x, ex;
+ if (!kn)
+ break;
+ if (kn <= 1 && i != n - 1) {
+ daalapvq_decode_laplace_delta(e, y + i, n - i, kn, curr, means);
+ ran_delta = 1;
+ i = n;
+ break;
+ }
+ ex = (2*exp_q8*kn + (n - i))/(2*(n - i));
+ if (ex > kn*256)
+ ex = kn*256;
+ sum_ex += (2*256*kn + (n - i))/(2*(n - i));
+ if (i != n - 1)
+ x = daalaent_decode_laplace_pvq(e, ex, kn);
+ else
+ x = kn;
+ y[i] = x*daalaent_cphase(e, x);
+ kn -= abs(x);
+ }
+ memset(&y[i], 0, (n - i)*sizeof(dctcoef)); /* Zero the rest */
+ if (!ran_delta) {
+ curr[DAALAPVQ_COUNT] = DAALAPVQ_NOVAL;
+ curr[DAALAPVQ_COUNT_EX] = DAALAPVQ_NOVAL;
+ }
+ curr[DAALAPVQ_K] = k - kn;
+ curr[DAALAPVQ_SUM_EX] = sum_ex;
+}
+
+static av_always_inline void daalapvq_adapt_shuffle(int *dst, int *src, int spd,
+ int idx, int mul)
+{
+ if (src[idx] < 1)
+ return;
+ dst[idx+0] += (mul*src[idx+0] - dst[idx+0]) >> spd;
+ dst[idx+1] += ( src[idx+1] - dst[idx+1]) >> spd;
+}
+
+static void daalapvq_decode_codeword(DaalaEntropy *e, DaalaPVQ *pvq,
+ dctcoef *y, int n, int k, uint8_t has_ref,
+ enum DaalaBsize bsize)
+{
+ int pos, adapt_curr[DAALAPVQ_NUM_ADAPTS] = {0};
+ int *pvq_adapt = pvq->pvqadapt + 4*(2*bsize + !has_ref);
+ const int cdf_id = 2*(n == 15) + !!has_ref, spd = 5;
+ if (k == 1 && n < 16) {
+ pos = daalaent_decode_cdf_adapt(e, &pvq->pvqcodeword_cdf, cdf_id, n - !!has_ref);
+ memset(y, 0, n*sizeof(dctcoef));
+ y[pos] = daalaent_cphase(e, 1);
+ } else {
+ daalapvq_decode_laplace_vector(e, y, n - !!has_ref, k, adapt_curr, pvq_adapt);
+ daalapvq_adapt_shuffle(pvq_adapt, adapt_curr, spd, DAALAPVQ_K, 256);
+ daalapvq_adapt_shuffle(pvq_adapt, adapt_curr, spd, DAALAPVQ_COUNT, 1);
+ }
+}
+
+static void daalapvq_decode_vector(DaalaEntropy *e, DaalaPVQ *pvq,
+ dctcoef *out, dctcoef *ref, const double beta,
+ uint8_t key_frame, int p, uint8_t *skip_rest,
+ uint8_t has_err, int band_idx,
+ int qm_off, enum DaalaBsize bsize)
+{
+ int i, k;
+ int qg = 0, skip = 0, itheta = (!!key_frame), has_ref = !key_frame;
+ double qcg, gain, theta = 0.0f, gr = 0.0f, gain_off = 0.0f;
+ dctcoef tmp[DAALAPVQ_MAX_PART_SIZE] = {0};
+
+ const int robust = has_err || key_frame;
+ const int band_len = pvq->size[band_idx];
+ const int16_t *qmatrix = &pvq->qmatrix[qm_off];
+ const int16_t *qmatrix_inv = &pvq->qmatrix_inv[qm_off];
+
+ if (!skip_rest[(band_idx + 2) % 3]) {
+ int iloc = (!!p)*DAALA_NBSIZES*DAALAPVQ_PARTITIONS_MAX + bsize*DAALAPVQ_PARTITIONS_MAX + band_idx;
+ i = daalaent_decode_cdf_adapt(e, &pvq->pvqtheta_gain_cdf, iloc, 8 + 7*pvq->skip[band_idx]);
+ if (!key_frame && i >= 10)
+ i++;
+ if (key_frame && i >= 8)
+ i++;
+ if (i >= 8) {
+ i -= 8;
+ skip_rest[0] = skip_rest[1] = skip_rest[2] = 1;
+ }
+ qg = i & 1;
+ itheta = (i >> 1) - 1;
+ has_ref = !(itheta == -1);
+ }
+ if (qg) {
+ int *ex = pvq->pvqgain_ex[p][bsize] + band_idx, ex_tmp = *ex;
+ DaalaCDF *mcdf = has_ref ? &pvq->pvqgain_ref_mcdf : &pvq->pvqgain_noref_mcdf;
+ qg = 1 + daalaent_decode_generic(e, mcdf, &ex_tmp, -1, 2);
+ *ex += ((qg << 16) - *ex) >> 2;
+ }
+ if (has_ref) {
+ int icgr, max_theta;
+ const int cfl_enabled = p && key_frame;
+ double cgr = daalapvq_compute_gain(ref, band_len, pvq->qmax[band_idx],
+ &gr, beta, qmatrix);
+ if (cfl_enabled)
+ cgr = 1.0f;
+ icgr = lrint(cgr);
+ qg = daalapvq_decode_gain_interleaved(qg, icgr + !key_frame) - !key_frame;
+ if (!qg && !key_frame)
+ skip = (icgr ? DAALAPVQ_SKIP_ZERO : DAALAPVQ_SKIP_COPY);
+ if (qg == icgr && !itheta && !cfl_enabled)
+ skip = DAALAPVQ_SKIP_COPY;
+ gain_off = cgr - icgr;
+ qcg = qg + gain_off;
+ max_theta = daalapvq_calc_theta_quant(qcg, beta);
+ if (itheta > 1 && (robust || max_theta > 3)) {
+ const int off = bsize*DAALAPVQ_PARTITIONS_MAX + band_idx;
+ int ex_tmp = pvq->pvqtheta_ex[off];
+ itheta = 2 + daalaent_decode_generic(e, &pvq->pvqtheta_mcdf, &ex_tmp,
+ robust ? -1 : max_theta - 3, 2);
+ pvq->pvqtheta_ex[off] += ((itheta << 16) - pvq->pvqtheta_ex[off]) >> 2;
+ }
+ theta = daalapvq_dequant_theta(itheta, max_theta);
+ } else {
+ itheta = 0;
+ qg += !key_frame;
+ qcg = qg;
+ skip = !qg ? DAALAPVQ_SKIP_ZERO : skip;
+ }
+
+ k = daalapvq_compute_k(qcg, itheta, theta, has_ref, band_len, beta, robust);
+ if (k)
+ daalapvq_decode_codeword(e, pvq, tmp, band_len, k, has_ref, bsize);
+
+ if (pvq->phase && has_ref) {
+ const int8_t phase = daalaent_cphase(e, 1);
+ for (i = 0; i < pvq->nb_coeffs; i++)
+ ref[i] *= phase;
+ pvq->phase = 0;
+ }
+
+ if (skip) {
+ if (skip == DAALAPVQ_SKIP_COPY)
+ memcpy(out, ref, band_len*sizeof(dctcoef));
+ else
+ memset(out, 0, band_len*sizeof(dctcoef));
+ return;
+ }
+ gain = daalapvq_gain_raise(qg + gain_off, pvq->qmax[band_idx], beta);
+ daalapvq_synth(out, tmp, ref, band_len, gr, has_ref, gain, theta, qmatrix,
+ qmatrix_inv);
+}
+
+/* q: quantizer, qm = bitstream_header->pvq_qm[p] */
+static void daalapvq_decode(DaalaEntropy *e, DaalaPVQ *pvq,
+ DaalaBitstreamHeader *b, dctcoef *ref,
+ dctcoef *out, int q, uint8_t *pvq_qm, uint8_t p,
+ enum DaalaBsize bsize, const double *beta,
+ int qm_off, int bskip)
+{
+ int i, j;
+ int bands = ff_daala_layouts[bsize].band_offset_size;
+ const int *off = ff_daala_layouts[bsize].band_offset;
+ uint8_t skip_rest[3] = {0};
+ out[0] = bskip & 1;
+ if (!(bskip >> 1)) {
+ for (i = 1; i < 1 << (2*bsize + 4); i++)
+ out[i] = b->key_frame ? 0 : ref[i];
+ } else {
+ pvq->nb_coeffs = off[bands];
+ pvq->phase = p && b->key_frame;
+ for (i = 0; i < bands; i++) {
+ pvq->size[i] = off[i+1] - off[i];
+ pvq->skip[i] = (!i && (i < bands - 1));
+ pvq->qmax[i] = FFMAX(1, q*pvq_qm[daalapvq_get_qm_idx(bsize, i + 1)] >> 4);
+ }
+ for (i = 0; i < bands; i++) {
+ daalapvq_decode_vector(e, pvq, out+off[i], ref+off[i], beta[i], b->key_frame,
+ p, skip_rest, 1, i, qm_off + off[i], bsize);
+ if (!i && !skip_rest[0] && bsize) {
+ int sdir = daalaent_decode_cdf_adapt(e, &pvq->pvqskip_cdf,
+ !!p + 2*(bsize - 1), 7);
+ for (j = 0; j < 3; j++)
+ skip_rest[j] = !!(sdir & (1 << j));
+ }
+ }
+ }
+}
+
+#endif /* AVCODEC_DAALAPVQ_H */
diff --git a/libavcodec/daala_utils.h b/libavcodec/daala_utils.h
new file mode 100644
index 0000000..b998d43
--- /dev/null
+++ b/libavcodec/daala_utils.h
@@ -0,0 +1,240 @@
+/*
+ * Daala video decoder
+ *
+ * Copyright (C) 2015-2016 Rostislav Pehlivanov <atomnuker at gmail.com>
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * This file incorporates work covered by the following copyright and
+ * permission notice:
+ *
+ * Copyright 2001-2015 Xiph.Org and contributors.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+ * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef AVCODEC_DAALAUTIL_H
+#define AVCODEC_DAALAUTIL_H
+
+#include "daala.h"
+
+#define DAALA_BSIZE8x8(arr, bstride, bx, by) ((arr)[(by)*(bstride) + (bx)])
+#define DAALA_BSIZE4x4(arr, bstride, bx, by) DAALA_BSIZE8x8(arr, bstride, (bx) >> 1, (by) >> 1)
+
+static av_always_inline int daala_get_qm_idx(enum DaalaBsize bsize, int b)
+{
+ return bsize*(bsize + 1) + b - b/3;
+}
+
+static av_always_inline int daala_qm_get_offset(enum DaalaBsize bsize, int dec)
+{
+ return DAALA_QM_BSIZE*(bsize*2 + dec);
+}
+
+#define BAND_FROM_RASTER(type) \
+static inline void daala_band_from_raster_##type(const DaalaBandLayout *layout,\
+ type *dst, int stride, \
+ const type *src) \
+{ \
+ int i; \
+ for (i = 0; i < layout->tab_size; i++) { \
+ dst[i] = src[layout->tab[i][1]*stride + layout->tab[i][0]]; \
+ } \
+}
+
+#define RASTER_FROM_BAND(type) \
+static inline void daala_raster_from_band_##type(const DaalaBandLayout *layout,\
+ type *dst, int stride, \
+ const type *src) \
+{ \
+ int i; \
+ for (i = 0; i < layout->tab_size; i++) { \
+ dst[layout->tab[i][1]*stride + layout->tab[i][0]] = src[i]; \
+ } \
+}
+
+BAND_FROM_RASTER(int16_t)
+BAND_FROM_RASTER(int32_t)
+RASTER_FROM_BAND(int16_t)
+RASTER_FROM_BAND(int32_t)
+
+#define RASTER_TO_CODING(type) \
+ static inline void daala_raster_to_coding_##type(type *dst, int n, \
+ const type *src, \
+ int stride) \
+{ \
+ enum DaalaBsize bs; \
+ daala_band_from_raster_##type(&ff_daala_layouts[0], dst + 1, stride, src); \
+ for (bs = 1; bs < DAALA_NBSIZES; bs++) { \
+ const type size = 1 << (DAALA_LOG_BSIZE0 + bs); \
+ const type offset = 1 << 2*(DAALA_LOG_BSIZE0 - 1 + bs); \
+ if (n >= size) { \
+ daala_band_from_raster_##type(&ff_daala_layouts[bs], dst + offset, \
+ stride, src); \
+ } \
+ } \
+ dst[0] = src[0]; \
+}
+
+#define CODING_TO_RASTER(type) \
+ static inline void daala_coding_to_raster_##type(type *dst, int stride, \
+ const type *src, \
+ int n) \
+{ \
+ enum DaalaBsize bs; \
+ daala_raster_from_band_##type(&ff_daala_layouts[0], dst, stride, src + 1); \
+ for (bs = 1; bs < DAALA_NBSIZES; bs++) { \
+ const int size = 1 << (DAALA_LOG_BSIZE0 + bs); \
+ const int offset = 1 << 2*(DAALA_LOG_BSIZE0 - 1 + bs); \
+ if (n >= size) { \
+ daala_raster_from_band_##type(&ff_daala_layouts[bs], dst, stride, \
+ src + offset); \
+ } \
+ } \
+ dst[0] = src[0]; \
+}
+
+RASTER_TO_CODING(int16_t)
+RASTER_TO_CODING(int32_t)
+CODING_TO_RASTER(int16_t)
+CODING_TO_RASTER(int32_t)
+
+static inline void daala_raster_to_coding(dctcoef *dst, int n,
+ const dctcoef *src, int stride)
+{
+ daala_raster_to_coding_int32_t(dst, n, src, stride);
+}
+
+static inline void daala_coding_to_raster(dctcoef *dst, int stride,
+ const dctcoef *src, int n)
+{
+ daala_coding_to_raster_int32_t(dst, stride, src, n);
+}
+
+static inline void daala_init_qmatrix(int16_t *qmatrix, int16_t *qmatrix_inv,
+ int qm_idx)
+{
+ int i, j, dec;
+ int16_t tmp[DAALA_QM_BSIZE] = {0}, tmp_inv[DAALA_QM_BSIZE] = {0};
+ const uint8_t *qm_mode = ff_daala_qmatrices[av_clip(qm_idx, 0, ff_daala_qmatrices_num)];
+ enum DaalaBsize bsize;
+ for (bsize = 0; bsize < DAALA_NBSIZES; bsize++) {
+ for (dec = 0; dec < 2; dec++) {
+ const int off = daala_qm_get_offset(bsize, dec);
+ int16_t *qm = qmatrix + off;
+ int16_t *qm_inv = qmatrix_inv + off;
+ const double *qm_basis = ff_daala_basis_qm_mag[dec][bsize];
+ for (i = 0; i < 4 << bsize; i++) {
+ for (j = 0; j < 4 << bsize; j++) {
+ double val;
+ double mag = 1.0f;
+ if (i || j) {
+ mag = qm_basis[i]*qm_basis[j];
+ mag /= 0.0625f*qm_mode[(i << 1 >> bsize)*8 + (j << 1 >> bsize)];
+ }
+ val = FFMIN((double)DAALA_QM_SCALE_MAX, round(mag*DAALA_QM_SCALE));
+ tmp[i*(4 << bsize) + j] = (int16_t)val;
+ val = round(DAALA_QM_SCALE*DAALA_QM_INV_SCALE/val);
+ tmp_inv[i*(4 << bsize) + j] = (int16_t)val;
+ }
+ }
+ daala_raster_to_coding_int16_t(qm, 4 << bsize, tmp, 4 << bsize);
+ daala_raster_to_coding_int16_t(qm_inv, 4 << bsize, tmp_inv, 4 << bsize);
+ }
+ }
+}
+
+static inline void daala_safe_free_2d_array(void **arr, int x)
+{
+ int i;
+ if (!arr)
+ return;
+ for (i = 0; i < x; i++)
+ av_free(arr[i]);
+ av_freep(&arr);
+}
+#define DAALA_FREE_2D_ARRAY(arr, xlen)\
+ daala_safe_free_2d_array((void **)arr, xlen);
+
+/* Frees any successful allocations made before the failed one */
+static inline void **daala_safe_alloc_2d_array(int x, int y, unsigned int vsize)
+{
+ int i;
+ void **arr = av_calloc(x, sizeof(void *));
+ if (!arr)
+ return NULL;
+ for (i = 0; i < x; i++) {
+ arr[i] = av_calloc(y, vsize);
+ if (!arr[i]) {
+ if (i > 0)
+ DAALA_FREE_2D_ARRAY(arr, i);
+ return NULL;
+ }
+ }
+ return arr;
+}
+#define DAALA_ALLOC_2D_ARRAY_GOTO(arr, xlen, ylen, vtype, label)\
+ if (!(arr = (vtype **)daala_safe_alloc_2d_array(xlen, ylen, sizeof(vtype)))) goto label;
+
+static inline void daala_safe_free_static_2d_array(void **arr[], int elem, int x)
+{
+ int i;
+ for (i = 0; i < elem; i++)
+ DAALA_FREE_2D_ARRAY(arr[i], x);
+}
+#define DAALA_FREE_STATIC_2D_ARRAY(arr, elem, xlen)\
+ daala_safe_free_static_2d_array((void ***)arr, elem, xlen);
+
+static inline int daala_safe_alloc_static_2d_array(void **arr[], int elem, int x,
+ int y, unsigned int vsize)
+{
+ int i;
+ for (i = 0; i < elem; i++) {
+ arr[i] = daala_safe_alloc_2d_array(x, y, vsize);
+ if (!arr[i]) {
+ if (i > 0)
+ DAALA_FREE_STATIC_2D_ARRAY(&arr[i], i, x);
+ return 1;
+ }
+ }
+ return 0;
+}
+#define DAALA_ALLOC_STATIC_2D_ARRAY_GOTO(arr, elem, xlen, ylen, vtype, label)\
+ if (daala_safe_alloc_static_2d_array((void ***)arr, elem, xlen, ylen, sizeof(vtype))) goto label;
+
+#endif /* AVCODEC_DAALAUTIL_H */
diff --git a/libavcodec/daaladec.c b/libavcodec/daaladec.c
new file mode 100644
index 0000000..c280ae6
--- /dev/null
+++ b/libavcodec/daaladec.c
@@ -0,0 +1,824 @@
+/*
+ * Daala video decoder
+ *
+ * Copyright (C) 2015-2016 Rostislav Pehlivanov <atomnuker at gmail.com>
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * This file incorporates work covered by the following copyright and
+ * permission notice:
+ *
+ * Copyright 2001-2015 Xiph.Org and contributors.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+ * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "daala.h"
+#include "avcodec.h"
+#include "get_bits.h"
+#include "bytestream.h"
+#include "internal.h"
+#include "libavutil/pixdesc.h"
+
+#include "daalatab.h"
+#include "daala_entropy.h"
+#include "daala_utils.h"
+#include "daala_pvq.h"
+#include "daaladsp.h"
+
+#define DERING_SKIP 255
+
+typedef struct DaalaContext {
+ DaalaEntropy e;
+ DaalaPVQ pvq;
+ DaalaDSP dsp;
+ DaalaBitstreamHeader h;
+
+ /* Full precision references flag */
+ int fpr;
+ /* Bytes per pixel */
+ int bpp;
+
+ /* Quantization matrix cache */
+ int last_qm;
+ int quantizer[DAALA_MAX_PLANES];
+
+ /* Picture constants */
+ const struct DaalaPixFmts *fmt;
+ int width, height; /* Padded avctx->width/height */
+ int sbn_x, sbn_y; /* Number of x and y superblocks */
+
+ /* Cumulative distribution functions */
+ DaalaCDF skip_cdf;
+ DaalaCDF dering_cdf;
+ DaalaCDF q_scale_cdf;
+ DaalaCDF haar_coef_cdf; /* Split coef Haar CDF */
+ DaalaCDF haar_split_cdf; /* Tree Splitting Haar CDF */
+ DaalaCDF haar_bit_cdf; /* Bitstream Haar CDF */
+ DaalaCDF haar_dc_mcdf[DAALA_MAX_PLANES]; /* Haar DC lvl. CDF (gen. model) */
+
+ /* Expectation values for MCDFs */
+ int haar_sb_ex[DAALA_MAX_PLANES];
+ int haar_dc_ex[DAALA_MAX_PLANES][DAALA_NBSIZES][3];
+
+ /* Block sizes array */
+ enum DaalaBsize *bsizes;
+ int bsizes_stride;
+
+ /* Buffers */
+ dctcoef *lcoef; /* Temporary resampled CfL coefficients */
+ dctcoef *dcoef[DAALA_MAX_PLANES];
+ dctcoef *ccoef[DAALA_MAX_PLANES];
+ dctcoef **haar_dc_buf[DAALA_MAX_PLANES]; /* #planes by sbn_x by sbn_y big */
+
+ uint8_t **dering; /* 2D array, sbn_x by sbn_y big */
+ uint8_t **q_scale;
+} DaalaContext;
+
+/* Sets the AVFrame type */
+static av_always_inline void set_frame_type(DaalaBitstreamHeader *b, AVFrame *frame)
+{
+ frame->key_frame = b->key_frame;
+ frame->pict_type = frame->key_frame ? AV_PICTURE_TYPE_I : !b->bipred ?
+ AV_PICTURE_TYPE_P : AV_PICTURE_TYPE_B;
+}
+
+static const struct DaalaPixFmts *find_pix_fmt(enum AVPixelFormat fmt)
+{
+ int i;
+ for (i = 0; i < ff_daala_valid_formats_num; i++)
+ if (ff_daala_valid_formats[i].fmt == fmt)
+ return &ff_daala_valid_formats[i];
+ return NULL;
+}
+
+/* Fetch motion vectors */
+static void get_motion_vectors(DaalaContext *s)
+{
+ if (s->h.key_frame)
+ return;
+}
+
+/* Fetch and decode per-plane quantizers */
+static void get_plane_quantizers(DaalaContext *s)
+{
+ int i;
+ for (i = 0; i < s->fmt->planes; i++) {
+ int qcode = daalaent_decode_uint(&s->e, ff_daala_quant_codemap_size);
+ if (!qcode) {
+ s->quantizer[i] = 0;
+ continue;
+ }
+ s->quantizer[i] = qcode < ff_daala_quant_codemap_size ?
+ (ff_daala_quant_codemap[qcode] << DAALA_CSHIFT >> 4) :
+ (ff_daala_quant_codemap[ff_daala_quant_codemap_size-1] << DAALA_CSHIFT >> 4);
+ }
+}
+
+typedef struct HaarGradient {
+ int x, y;
+} HaarGradient;
+
+#define CALC_Q_DC(p) \
+ (s->quantizer[p]*s->h.pvq_qm[p][daala_get_qm_idx(DAALA_NBSIZES-1, 0)] >> 4)
+
+/* Get DC level */
+static void get_haar_dc_sb(DaalaContext *s, HaarGradient *g, dctcoef *d,
+ int x, int y, uint8_t p, uint8_t lim_pass)
+{
+ int q, q_dc;
+ int xdec = s->fmt->dec[p][0];
+ const int aw = s->width >> xdec;
+ const int ln = DAALA_LOG_BSIZE_MAX - xdec;
+ dctcoef dc_pred = 0, **dc_buf = s->haar_dc_buf[p];
+ if (!s->quantizer[p])
+ q_dc = 1;
+ else
+ q_dc = FFMAX(1, CALC_Q_DC(p));
+ if (x && y) {
+ if (lim_pass) {
+ dc_pred = 22*dc_buf[x-1][y-0] - 9*dc_buf[x-1][y-1] +
+ 15*dc_buf[x+0][y-1] + 4*dc_buf[x+1][y-1];
+ } else {
+ dc_pred = 23*dc_buf[x-1][y-0] - 10*dc_buf[x-1][y-1] +
+ 19*dc_buf[x-0][y-1];
+ }
+ dc_pred = (dc_pred + 16) >> 5;
+ } else {
+ dc_pred += x > 0 ? dc_buf[x-1][y-0] : 0;
+ dc_pred += y > 0 ? dc_buf[x-0][y-1] : 0;
+ }
+ q = daalaent_decode_generic(&s->e, &s->haar_dc_mcdf[p], &s->haar_sb_ex[p], -1, 2);
+ q *= daalaent_cphase(&s->e, q);
+ q = q*q_dc + dc_pred;
+ d[(y << ln)*aw + (x << ln)] = q;
+ dc_buf[x][y] = q;
+ g->x = x ? dc_buf[x-1][y-0] - q : g->x;
+ g->y = y ? dc_buf[x-0][y-1] - q : g->y;
+}
+
+/* Get DC level for sub-blocks */
+static void get_haar_dc_lvl(DaalaContext *s, HaarGradient *g, dctcoef *d,
+ int x, int y, uint8_t p, enum DaalaBsize bsize)
+{
+ int i, q_dc, q_ac[2];
+ int xdec = s->fmt->dec[p][0];
+ const int aw = s->width >> xdec;
+ const int ln = bsize - xdec + 2;
+ dctcoef coef[4];
+ if (!s->quantizer[p]) {
+ q_dc = 1;
+ q_ac[0] = 1;
+ q_ac[1] = 1;
+ } else {
+ q_dc = FFMAX(1, CALC_Q_DC(p));
+ q_ac[0] = (q_dc*ff_daala_dc_comp[bsize - xdec][0] + 8) >> 4;
+ q_ac[1] = (q_dc*ff_daala_dc_comp[bsize - xdec][1] + 8) >> 4;
+ }
+ coef[0] = d[(y << ln)*aw + (x << ln)];
+ for (i = 1; i < 4; i++) {
+ int *ex = &s->haar_dc_ex[p][bsize][i-1];
+ int q = daalaent_decode_generic(&s->e, &s->haar_dc_mcdf[p], ex, -1, 2);
+ q *= daalaent_cphase(&s->e, q);
+ coef[i] = q*q_ac[i == 3];
+ }
+ g->x = coef[1] += g->x/5;
+ g->y = coef[2] += g->y/5;
+ daala_haar_kern(&coef[0], &coef[1], &coef[2], &coef[3]);
+ d[(y << ln)*aw + (x << ln)] = coef[0];
+ d[(y << ln)*aw + ((x + 1) << ln)] = coef[1];
+ d[((y + 1) << ln)*aw + (x << ln)] = coef[2];
+ d[((y + 1) << ln)*aw + ((x + 1) << ln)] = coef[3];
+}
+
+/* Quantizer scaling - decoded data not currently used */
+static av_unused inline void decode_quantizer_scaling(DaalaContext *s,
+ int x, int y, uint8_t skip)
+{
+ int q_scale = 0;
+ if (!skip) {
+ int u = y > 0 ? s->q_scale[x][y-1] : 0;
+ int l = x > 0 ? s->q_scale[x-1][y] : 0;
+ q_scale = daalaent_decode_cdf_adapt(&s->e, &s->q_scale_cdf, u + l*4, 4);
+ }
+ s->q_scale[x][y] = q_scale;
+}
+
+static void daala_calc_prediction(DaalaContext *s, dctcoef *pred, const dctcoef *d,
+ int x, int y, uint8_t p, enum DaalaBsize bsize)
+{
+ int n = 1 << (bsize + DAALA_LOG_BSIZE0);
+ int aw = s->width >> s->fmt->dec[p][0];
+ int off = ((y << DAALA_LOG_BSIZE0))*aw + (x << DAALA_LOG_BSIZE0);
+ if (s->h.key_frame) {
+ if (!p || s->h.haar) {
+ memset(pred, 0, n*n*sizeof(dctcoef));
+ if (!p && !s->h.haar && s->dsp.intrapred) {
+ s->dsp.intrapred((uint8_t *)pred, (uint8_t *)&d[off], aw, x, y,
+ s->bsizes, s->bsizes_stride, bsize);
+ }
+ } else {
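+ /* Chroma on keyframes is predicted from the resampled CfL coefficients in lcoef */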
+ for (y = 0; y < n; y++) {
+ for (x = 0; x < n; x++) {
+ pred[n*y + x] = s->lcoef[n*y + x];
+ }
+ }
+ }
+ } else {
+ /* Copy from mv coeffs */
+ }
+}
+
+static inline int decode_haar_coeff_tree_split(DaalaContext *s, int sum, int ctx,
+ uint8_t coef_split)
+{
+ int a = 0, shift, offset;
+ DaalaCDF *cdf = coef_split ? &s->haar_coef_cdf : &s->haar_split_cdf;
+ if (!sum)
+ return 0;
+ if ((shift = FFMAX(0, daalaent_log2(sum) - 4)))
+ a = daalaent_decode_bits(&s->e, shift);
+ if (coef_split)
+ offset = 15*ctx + (sum >> shift) - 1;
+ else
+ offset = 15*(2*ctx + FFMIN(shift, 1)) + (sum >> shift) - 1;
+ a += daalaent_decode_cdf_adapt(&s->e, cdf, offset,
+ (sum >> shift) + 1) << shift;
+ if (a > sum) {
+ a = sum;
+ s->e.err = 1;
+ }
+ return a;
+}
+
+static void decode_tree_sum(DaalaContext *s, dctcoef *pred, int x, int y,
+ dctcoef sum_t, const int shift, const int dir)
+{
+ dctcoef c_sum, t_sum, sub[2][2], n = 1 << shift;
+ const int offset = dir + 3*(daalaent_log2(FFMAX(x,y)) - 1);
+ if (!sum_t)
+ return;
+ pred[y*n + x] = decode_haar_coeff_tree_split(s, sum_t, offset, 1);
+ if ((c_sum = sum_t - pred[y*n + x])) {
+ dctcoef *ele_1 = &sub[!!dir][!dir], *ele_2 = &sub[!dir][!!dir];
+ t_sum = decode_haar_coeff_tree_split(s, c_sum, !!dir, 0);
+ sub[0][0] = decode_haar_coeff_tree_split(s, t_sum, 2, 0);
+ *ele_1 = t_sum - sub[0][0];
+ *ele_2 = decode_haar_coeff_tree_split(s, c_sum - t_sum, 2, 0);
+ sub[1][1] = c_sum - t_sum - *ele_2;
+ } else {
+ memset(&sub[0][0], 0, 4*sizeof(dctcoef));
+ }
+ if (4*x < n && 4*y < n) {
+ decode_tree_sum(s, pred, 2*x, 2*y, sub[0][0], shift, dir);
+ decode_tree_sum(s, pred, 2*x + 1, 2*y, sub[0][1], shift, dir);
+ decode_tree_sum(s, pred, 2*x, 2*y + 1, sub[1][0], shift, dir);
+ decode_tree_sum(s, pred, 2*x + 1, 2*y + 1, sub[1][1], shift, dir);
+ return;
+ }
+ pred[2*y*n + 2*x] = sub[0][0];
+ pred[2*y*n + 2*x + 1] = sub[0][1];
+ pred[(2*y + 1)*n + 2*x] = sub[1][0];
+ pred[(2*y + 1)*n + 2*x + 1] = sub[1][1];
+}
+
+/* Haar block decoding and transform */
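+/* Outline: the total coefficient magnitude is decoded, split recursively over
+ * the three directional trees (decode_tree_sum), signs are resolved, the
+ * values are dequantized and added to the prediction, the DC is restored from
+ * the separately decoded value and the inverse wavelet is applied. */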
+static void decode_block_haar(DaalaContext *s, int x, int y, uint8_t p,
+ enum DaalaBsize bsize)
+{
+ int i, j, k, l, n = 1 << (bsize + 2);
+ const int dx = x << bsize, dy = y << bsize;
+ const int aw = s->width >> s->fmt->dec[p][0];
+ const int boffset = (dy << 2)*aw + (dx << 2);
+
+ dctcoef tree[4][4];
+ dctcoef pred[DAALA_BSIZE_MAX*DAALA_BSIZE_MAX];
+ dctcoef tpred[DAALA_BSIZE_MAX*DAALA_BSIZE_MAX];
+
+ daala_calc_prediction(s, pred, s->dcoef[p], dx, dy, p, bsize);
+ memcpy(tpred, pred, n*n*sizeof(dctcoef));
+
+ bsize += 2;
+
+ tree[0][0] = daalaent_decode_cdf_adapt(&s->e, &s->haar_bit_cdf, p, 16);
+ if (tree[0][0] == 15)
+ tree[0][0] += daalaent_decode_unary(&s->e);
+
+ if (tree[0][0] > 24) {
+ s->e.err = 1;
+ return;
+ } else if (tree[0][0] > 1) {
+ int tmp = daalaent_decode_bits(&s->e, tree[0][0] - 1);
+ tree[0][0] = (1 << (tree[0][0] - 1)) | tmp;
+ }
+
+ tree[1][1] = decode_haar_coeff_tree_split(s, tree[0][0], 3, 0);
+ tree[0][1] = decode_haar_coeff_tree_split(s, tree[0][0] - tree[1][1], 4, 0);
+ tree[1][0] = tree[0][0] - tree[1][1] - tree[0][1];
+
+ decode_tree_sum(s, pred, 1, 0, tree[0][1], bsize, 0);
+ decode_tree_sum(s, pred, 0, 1, tree[1][0], bsize, 1);
+ decode_tree_sum(s, pred, 1, 1, tree[1][1], bsize, 2);
+
+ for (i = 0; i < n; i++) {
+ for (j = (i == 0); j < n; j++)
+ pred[i*n + j] *= daalaent_cphase(&s->e, pred[i*n + j]);
+ }
+ for (i = 0; i < 3; i++) {
+ for (j = 0; j < bsize; j++) {
+ int bo = (((i + 1) >> 1) << j)*n + (((i + 1) & 1) << j);
+ int q = !s->quantizer[p] ? 1 : s->quantizer[p]*ff_daala_haar_qm[i == 2][j] >> 4;
+ for (k = 0; k < 1 << j; k++)
+ for (l = 0; l < 1 << j; l++)
+ pred[bo + k*n + l] = q*pred[bo + k*n + l] + tpred[bo + k*n + l];
+ }
+ }
+
+ /* DC level */
+ pred[0] = s->dcoef[p][boffset];
+
+ for (i = 0; i < n; i++) {
+ for (j = 0; j < n; j++)
+ s->dcoef[p][boffset + i*aw + j] = pred[i*n + j];
+ }
+
+ /* IDWT */
+ if (s->dsp.idwt)
+ s->dsp.idwt((uint8_t *)(s->ccoef[p] + boffset), aw,
+ (uint8_t *)(s->dcoef[p] + boffset), aw, bsize);
+}
+
+static inline void init_skipped(dctcoef *d, int ostride, dctcoef *pred, int istride,
+ int key_frame)
+{
+ int i, j;
+ for (i = 0; i < istride; i++) {
+ for (j = 0; j < istride; j++) {
+ if (i || j)
+ d[i*ostride + j] = key_frame ? 0 : pred[i*istride + j];
+ }
+ }
+}
+
+/* PVQ decoding and transform */
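+/* Outline: the prediction is built, reordered from raster to coding order,
+ * PVQ decoding is run against it, the DC is restored from the separately
+ * decoded value, the block is reordered back into the coefficient buffer and
+ * the inverse DCT for this block size is applied. */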
+static void decode_block_pvq(DaalaContext *s, int x, int y, uint8_t p,
+ enum DaalaBsize bsize, int skip)
+{
+ const int sx = x << bsize;
+ const int sy = y << bsize;
+ const int n = 1 << (bsize + 2);
+ const int aw = s->width >> s->fmt->dec[p][0];
+ const int boffset = (sy << 2)*aw + (sx << 2);
+ const int qm = FFMAX(1, s->quantizer[p]);
+ const int qm_off = daala_qm_get_offset(bsize, s->fmt->dec[p][0]);
+ const double *beta = ff_daalapvq_beta[s->h.act_mask][p][bsize];
+
+ dctcoef *d = s->dcoef[p];
+ dctcoef pred[DAALA_BSIZE_MAX*DAALA_BSIZE_MAX];
+ dctcoef tpred[DAALA_BSIZE_MAX*DAALA_BSIZE_MAX];
+
+ daala_calc_prediction(s, pred, d, sx, sy, p, bsize);
+
+ init_skipped(&d[boffset], aw, pred, n, s->h.key_frame);
+
+ daala_raster_to_coding(tpred, n, pred, n);
+
+ daalapvq_decode(&s->e, &s->pvq, &s->h, tpred, pred, qm, s->h.pvq_qm[p],
+ p, bsize, beta, qm_off, skip);
+
+ pred[0] = d[boffset];
+
+ daala_coding_to_raster(&d[boffset], aw, pred, n);
+
+ /* IDCT */
+ if (s->dsp.idct[bsize])
+ s->dsp.idct[bsize]((uint8_t *)(s->ccoef[p] + boffset), aw,
+ (uint8_t *)(s->dcoef[p] + boffset), aw);
+}
+
+/* Recursively segments the frame into blocks and decodes them */
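+/* Each call either decodes a leaf block (Haar or PVQ path) or splits the
+ * block into four and recurses, running the block split filters around the
+ * recursion and, on key frames, decoding the sub-block DC levels first. */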
+static int decode_block_rec(DaalaContext *s, HaarGradient g, int x, int y,
+ uint8_t p, enum DaalaBsize bsize)
+{
+ int i, j, lc_skip, cbs;
+ const int sx = x << bsize;
+ const int sy = y << bsize;
+ const int llim = 1 << bsize;
+ const int off = 2*bsize + !!p;
+ const int bst = s->bsizes_stride;
+ const int xdec = s->fmt->dec[p][0];
+ const int aw = s->width >> xdec;
+ enum DaalaBsize obsize = DAALA_BSIZE4x4(s->bsizes, bst, sx, sy);
+
+ if (s->h.haar) {
+ obsize = bsize;
+ } else if (!p) {
+ lc_skip = daalaent_decode_cdf_adapt(&s->e, &s->skip_cdf, off,
+ 4 + (bsize > 0));
+ obsize = lc_skip < 4 ? bsize : -1;
+ }
+
+ if ((cbs = FFMAX(obsize, xdec)) == bsize) {
+ cbs -= xdec;
+ if (!p) {
+ for (i = 0; i < llim; i++)
+ for (j = 0; j < llim; j++)
+ DAALA_BSIZE4x4(s->bsizes, bst, sx + i, sy + j) = bsize;
+ }
+ if (p && s->dsp.cfl) {
+ s->dsp.cfl((uint8_t *)s->lcoef, 1 << (cbs + DAALA_LOG_BSIZE0),
+ (uint8_t *)(s->dcoef[0] + (y << (2 + bsize))*s->width + (x << (2 + bsize))),
+ s->width, xdec, s->fmt->dec[p][1], cbs, obsize);
+ }
+ if (p && !s->h.haar) {
+ lc_skip = daalaent_decode_cdf_adapt(&s->e, &s->skip_cdf, off, 4);
+ }
+ if (s->h.haar)
+ decode_block_haar(s, x, y, p, cbs);
+ else
+ decode_block_pvq(s, x, y, p, cbs, lc_skip);
+ } else {
+ int bs = bsize - s->fmt->dec[p][0];
+ int bo = (y << (DAALA_LOG_BSIZE0 + bs))*aw + (x << (DAALA_LOG_BSIZE0 + bs));
+ int hfilter = (x + 1) << (DAALA_LOG_BSIZE0 + bs) <= s->width;
+ int vfilter = (y + 1) << (DAALA_LOG_BSIZE0 + bs) <= s->height;
+ if (!s->h.key_frame && s->dsp.pre_split_filter)
+ s->dsp.pre_split_filter((uint8_t *)(s->ccoef[p] + bo), aw, bs,
+ hfilter, vfilter);
+ if (s->h.key_frame)
+ get_haar_dc_lvl(s, &g, s->dcoef[p], 2*x, 2*y, p, bsize - 1);
+ decode_block_rec(s, g, 2*x + 0, 2*y + 0, p, bsize - 1);
+ decode_block_rec(s, g, 2*x + 1, 2*y + 0, p, bsize - 1);
+ decode_block_rec(s, g, 2*x + 0, 2*y + 1, p, bsize - 1);
+ decode_block_rec(s, g, 2*x + 1, 2*y + 1, p, bsize - 1);
+ if (s->dsp.post_split_filter)
+ s->dsp.post_split_filter((uint8_t *)(s->ccoef[p] + bo), aw, bs,
+ hfilter, vfilter);
+ }
+
+ return 0;
+}
+
+static void decode_blocks(DaalaContext *s, AVFrame *frame)
+{
+ int x, y, p;
+ for (y = 0; y < s->sbn_y; y++) {
+ for (x = 0; x < s->sbn_x; x++) {
+ for (p = 0; p < s->fmt->planes; p++) {
+ struct HaarGradient g = {0};
+ if (s->h.key_frame)
+ get_haar_dc_sb(s, &g, s->dcoef[p], x, y, p, y > 0 && x < s->sbn_x - 1);
+ decode_block_rec(s, g, x, y, p, DAALA_NBSIZES - 1);
+ }
+ }
+ }
+}
+
+/* Applies the frame postfilter and decodes the per-superblock dering flags */
+static void apply_filtering(DaalaContext *s, AVFrame *frame)
+{
+ int p, x, y;
+ for (p = 0; p < s->fmt->planes; p++) {
+ int xdec = s->fmt->dec[p][0];
+ int ydec = s->fmt->dec[p][1];
+ int aw = s->width >> xdec;
+ if (!s->h.haar && s->dsp.frame_postfilter) {
+ s->dsp.frame_postfilter((uint8_t *)s->ccoef[p], aw, s->sbn_x,
+ s->sbn_y, xdec, ydec);
+ }
+ }
+ for (x = 0; x < s->sbn_x; x++) {
+ for (y = 0; y < s->sbn_y; y++) {
+ int off;
+ uint8_t u, l, res;
+ if (s->dering[x][y] == DERING_SKIP) {
+ s->dering[x][y] = 0;
+ continue;
+ }
+ u = y ? s->dering[x][y-1] << 1 : 0;
+ l = x ? s->dering[x-1][y] << 0 : 0;
+ off = av_clip(u + l, 0, s->dering_cdf.x);
+ res = daalaent_decode_cdf_adapt(&s->e, &s->dering_cdf, off, 2);
+ s->dering[x][y] = res;
+ }
+ }
+}
+
+/* Reinits adaptive probability contexts */
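+/* Called once per frame, before any symbols are decoded. */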
+static void reset_cdfs(DaalaContext *s)
+{
+ int i, j, k;
+ const int pvq_ex_const = 2 << 16;
+
+ /* CDFs */
+ daalaent_cdf_reset(&s->dering_cdf);
+ daalaent_cdf_reset(&s->q_scale_cdf);
+ daalaent_cdf_reset(&s->haar_bit_cdf);
+ daalaent_cdf_reset(&s->haar_split_cdf);
+ daalaent_cdf_reset(&s->skip_cdf);
+ daalaent_cdf_reset(&s->haar_coef_cdf);
+
+ /* PVQ CDFs */
+ daalaent_cdf_reset(&s->pvq.pvqcodeword_cdf);
+ daalaent_cdf_reset(&s->pvq.pvqskip_cdf);
+ daalaent_cdf_reset(&s->pvq.pvqtheta_gain_cdf);
+ daalaent_cdf_reset(&s->pvq.pvqtheta_mcdf);
+ daalaent_cdf_reset(&s->pvq.pvqgain_ref_mcdf);
+ daalaent_cdf_reset(&s->pvq.pvqgain_noref_mcdf);
+
+ /* DC MCDFs + Expectation values */
+ for (i = 0; i < DAALA_NBSIZES*DAALAPVQ_PARTITIONS_MAX; i++)
+ s->pvq.pvqtheta_ex[i] = s->h.key_frame ? 24576 : pvq_ex_const;
+
+ for (i = 0; i < 2*DAALA_NBSIZES; i++) {
+ s->pvq.pvqadapt[4*i + DAALAPVQ_K] = 384;
+ s->pvq.pvqadapt[4*i + DAALAPVQ_SUM_EX] = 256;
+ s->pvq.pvqadapt[4*i + DAALAPVQ_COUNT] = 104;
+ s->pvq.pvqadapt[4*i + DAALAPVQ_COUNT_EX] = 128;
+ }
+
+ for (i = 0; i < s->fmt->planes; i++) {
+ daalaent_cdf_reset(&s->haar_dc_mcdf[i]);
+ s->haar_sb_ex[i] = i > 0 ? 8 : 32768;
+ for (j = 0; j < DAALA_NBSIZES; j++) {
+ for (k = 0; k < 3; k++)
+ s->haar_dc_ex[i][j][k] = i > 0 ? 8 : 32768;
+ for (k = 0; k < DAALAPVQ_PARTITIONS_MAX; k++)
+ s->pvq.pvqgain_ex[i][j][k] = pvq_ex_const;
+ }
+ }
+}
+
+static int daala_decode_frame(AVCodecContext *avctx, void *data,
+ int *got_frame, AVPacket *avpkt)
+{
+ int i, j, p, ret;
+ AVFrame *frame = data;
+ DaalaContext *s = avctx->priv_data;
+
+ if ((ret = ff_get_buffer(avctx, frame, 0)) < 0)
+ return ret;
+
+ /* Init the entropy decoding context */
+ daalaent_decode_init(&s->e, avpkt->data, avpkt->size);
+
+ /* Read the frame miniheader */
+ if (daalaent_decode_frame_header(&s->e, &s->h, s->fmt->planes))
+ return AVERROR_INVALIDDATA;
+
+ /* Update AVFrame type */
+ set_frame_type(&s->h, frame);
+
+ /* (Re)init the PVQ quantization matrix */
+ if (s->last_qm != s->h.qm) {
+ daala_init_qmatrix(s->pvq.qmatrix, s->pvq.qmatrix_inv, s->h.qm);
+ s->last_qm = s->h.qm;
+ }
+
+ /* Prepare the CDFs and MCDFs for use */
+ reset_cdfs(s);
+
+ /* TODO: Actually read the MVs */
+ get_motion_vectors(s);
+
+ /* Get per-plane quantizers (all identical for now) */
+ get_plane_quantizers(s);
+
+ /* NOTE: Remove this check once non-keyframe (inter) support is added */
+ if (s->h.key_frame) {
+ /* Recursively decode all blocks */
+ decode_blocks(s, frame);
+
+ /* Apply frame postfilter */
+ apply_filtering(s, frame);
+ }
+
+ /* Copy data to frame */
+ for (p = 0; p < s->fmt->planes; p++) {
+ dctcoef *src = s->ccoef[p];
+ uint8_t *dst = frame->data[p];
+ int stride = s->width >> s->fmt->dec[p][0];
+ int r_w = avctx->width >> s->fmt->dec[p][0];
+ int r_h = avctx->height >> s->fmt->dec[p][1];
+ if (s->fpr) {
+ const int cs = !s->quantizer[p] ? DAALA_CSHIFT - (s->fmt->depth_mode - 1)*2 : 0;
+ for (i = 0; i < r_h; i++) {
+ for (j = 0; j < r_w; j++)
+ dst[j] = av_clip(src[j]*(1 << cs) + (128 << DAALA_CSHIFT), 0, DAALA_PCLIP_FPR);
+ dst += frame->linesize[p];
+ src += stride;
+ }
+ } else {
+ const int cs = !s->quantizer[p] ? (s->fmt->depth_mode - 1)*2 : DAALA_CSHIFT;
+ for (i = 0; i < r_h; i++) {
+ for (j = 0; j < r_w; j++)
+ dst[j] = av_clip_uint8(((src[j] + (1 << cs >> 1)) >> cs) + 128);
+ dst += frame->linesize[p];
+ src += stride;
+ }
+ }
+ }
+
+ if (s->e.err)
+ av_log(avctx, AV_LOG_WARNING, "Bitstream error!\n");
+
+ *got_frame = 1;
+
+ return daalaent_bits_count(&s->e) >> 3;
+}
+
+static av_cold int daala_decode_free(AVCodecContext *avctx)
+{
+ int i;
+ DaalaContext *s = avctx->priv_data;
+
+ /* Blocksizes array */
+ s->bsizes -= DAALA_BSIZE_GRID*s->bsizes_stride + DAALA_BSIZE_GRID;
+ av_freep(&s->bsizes);
+
+ /* CDF */
+ daalaent_cdf_free(&s->dering_cdf);
+ daalaent_cdf_free(&s->q_scale_cdf);
+ daalaent_cdf_free(&s->haar_bit_cdf);
+ daalaent_cdf_free(&s->haar_split_cdf);
+ daalaent_cdf_free(&s->skip_cdf);
+ daalaent_cdf_free(&s->haar_coef_cdf);
+ for (i = 0; i < s->fmt->planes; i++)
+ daalaent_cdf_free(&s->haar_dc_mcdf[i]);
+
+ /* PVQ CDFs */
+ daalaent_cdf_free(&s->pvq.pvqcodeword_cdf);
+ daalaent_cdf_free(&s->pvq.pvqskip_cdf);
+ daalaent_cdf_free(&s->pvq.pvqtheta_gain_cdf);
+ daalaent_cdf_free(&s->pvq.pvqtheta_mcdf);
+ daalaent_cdf_free(&s->pvq.pvqgain_ref_mcdf);
+ daalaent_cdf_free(&s->pvq.pvqgain_noref_mcdf);
+
+ /* Coefs */
+ for (i = 0; i < s->fmt->planes; i++)
+ av_freep(&s->ccoef[i]);
+
+ av_freep(&s->dcoef[0]);
+ av_freep(&s->dcoef[1]);
+
+ av_freep(&s->lcoef);
+
+ /* 2(.5)D tables */
+ DAALA_FREE_2D_ARRAY(s->dering, s->sbn_x);
+ DAALA_FREE_2D_ARRAY(s->q_scale, s->sbn_x);
+ DAALA_FREE_STATIC_2D_ARRAY(s->haar_dc_buf, s->fmt->planes, s->sbn_x);
+
+ return 0;
+}
+
+static av_cold int daala_decode_init(AVCodecContext *avctx)
+{
+ int i, luma_w, luma_h, chroma_w, chroma_h, err = 0;
+ DaalaContext *s = avctx->priv_data;
+
+ /* Init a default QM; if the file doesn't use the default it gets reinitialized */
+ s->last_qm = 1;
+ daala_init_qmatrix(s->pvq.qmatrix, s->pvq.qmatrix_inv, s->last_qm);
+
+ s->fmt = find_pix_fmt(avctx->pix_fmt);
+ if (!s->fmt) {
+ av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format - %s!\n",
+ av_get_pix_fmt_name(avctx->pix_fmt));
+ return AVERROR(EINVAL);
+ }
+
+ if (ff_daaladsp_init(&s->dsp, s->fmt->depth)) {
+ av_log(avctx, AV_LOG_ERROR, "Unsupported bit depth - %i!\n",
+ s->fmt->depth);
+ return AVERROR(EINVAL);
+ }
+
+ /* This can be enabled for 8 bit files too and the demuxer knows it, but
+ * there is currently no way to pass that flag from the demuxer to this
+ * decoder. */
+ s->fpr = s->fmt->depth > 8 ? 1 : 0;
+ s->bpp = 4;
+
+ s->width = FFALIGN(avctx->width, DAALA_BSIZE_MAX);
+ s->height = FFALIGN(avctx->height, DAALA_BSIZE_MAX);
+
+ luma_w = s->width >> s->fmt->dec[0][0];
+ luma_h = s->height >> s->fmt->dec[0][1];
+ chroma_w = s->width >> s->fmt->dec[1][0];
+ chroma_h = s->height >> s->fmt->dec[1][1];
+
+ s->sbn_x = s->width >> DAALA_LOG_BSIZE_MAX;
+ s->sbn_y = s->height >> DAALA_LOG_BSIZE_MAX;
+
+ /* Block sizes array */
+ s->bsizes_stride = (s->sbn_x + 2)*DAALA_BSIZE_GRID;
+ s->bsizes = av_malloc(sizeof(enum DaalaBsize)*s->bsizes_stride*(s->sbn_y + 2)*DAALA_BSIZE_GRID);
+ if (!s->bsizes)
+ return AVERROR(ENOMEM);
+
+ /* Like motion vectors, this could be outside the image, so shift it */
+ s->bsizes += DAALA_BSIZE_GRID*s->bsizes_stride + DAALA_BSIZE_GRID;
+
+ /* CDF init */
+ err |= daalaent_cdf_alloc(&s->dering_cdf, 4, 2, 128, 0, 2, 0);
+ err |= daalaent_cdf_alloc(&s->q_scale_cdf, 8, 4, 128, 0, 2, 0);
+ err |= daalaent_cdf_alloc(&s->haar_bit_cdf, 3, 16, 128, 0, 2, 0);
+ err |= daalaent_cdf_alloc(&s->haar_split_cdf, 15*2*5, 16, 128, 0, 2, 0);
+ err |= daalaent_cdf_alloc(&s->skip_cdf, DAALA_NBSIZES*2, 5, 128, 0, 2, 0);
+ err |= daalaent_cdf_alloc(&s->haar_coef_cdf, 15*3*(DAALA_NBSIZES+1), 16, 128, 0, 2, 0);
+ for (i = 0; i < s->fmt->planes; i++)
+ err |= daalaent_cdf_alloc(&s->haar_dc_mcdf[i], DAALAENT_MODEL_TAB, 16, 64, 0, 0, 1);
+
+ /* PVQ CDFs */
+ err |= daalaent_cdf_alloc(&s->pvq.pvqcodeword_cdf, 4, 16, 128, 0, 0, 0);
+ err |= daalaent_cdf_alloc(&s->pvq.pvqskip_cdf, 2*(DAALA_NBSIZES-1), 7, 128, 0, 2, 0);
+ err |= daalaent_cdf_alloc(&s->pvq.pvqtheta_gain_cdf, 2*DAALA_NBSIZES*DAALAPVQ_PARTITIONS_MAX, 16, 128, 0, 2, 0);
+ err |= daalaent_cdf_alloc(&s->pvq.pvqtheta_mcdf, DAALAENT_MODEL_TAB, 16, 64, 0, 0, 1);
+ err |= daalaent_cdf_alloc(&s->pvq.pvqgain_ref_mcdf, DAALAENT_MODEL_TAB, 16, 64, 0, 0, 1);
+ err |= daalaent_cdf_alloc(&s->pvq.pvqgain_noref_mcdf, DAALAENT_MODEL_TAB, 16, 64, 0, 0, 1);
+
+ /* The allocations above are unlikely to fail so checking here is okay */
+ if (err)
+ goto alloc_fail;
+
+ /* Arrays */
+ DAALA_ALLOC_2D_ARRAY_GOTO(s->dering, s->sbn_x, s->sbn_y, uint8_t, alloc_fail);
+ DAALA_ALLOC_2D_ARRAY_GOTO(s->q_scale, s->sbn_x, s->sbn_y, uint8_t, alloc_fail);
+ DAALA_ALLOC_STATIC_2D_ARRAY_GOTO(s->haar_dc_buf, s->fmt->planes, s->sbn_x,
+ s->sbn_y, dctcoef, alloc_fail);
+
+ /* Coefficient buffers; these still allocate more than needed */
+ s->lcoef = av_malloc(luma_w*luma_h*s->bpp);
+ if (!s->lcoef)
+ goto alloc_fail;
+
+ s->dcoef[0] = av_malloc(luma_w*luma_h*s->bpp);
+ if (!s->dcoef[0])
+ goto alloc_fail;
+
+ s->dcoef[1] = av_malloc(chroma_w*chroma_h*s->bpp);
+ s->dcoef[3] = s->dcoef[2] = s->dcoef[1];
+ if (!s->dcoef[1])
+ goto alloc_fail;
+
+ for (i = 0; i < s->fmt->planes; i++) {
+ const int p_w = s->width >> s->fmt->dec[0][0];
+ const int p_h = s->height >> s->fmt->dec[0][1];
+ s->ccoef[i] = av_malloc(p_w*p_h*s->bpp);
+ if (!s->ccoef[i])
+ goto alloc_fail;
+ }
+
+ return 0;
+
+alloc_fail:
+ av_log(avctx, AV_LOG_ERROR, "Failed to allocate memory!\n");
+ daala_decode_free(avctx);
+ return AVERROR(ENOMEM);
+}
+
+AVCodec ff_daala_decoder = {
+ .name = "daala",
+ .long_name = NULL_IF_CONFIG_SMALL("Daala"),
+ .type = AVMEDIA_TYPE_VIDEO,
+ .id = AV_CODEC_ID_DAALA,
+ .priv_data_size = sizeof(DaalaContext),
+ .capabilities = AV_CODEC_CAP_EXPERIMENTAL,
+ .init = daala_decode_init,
+ .close = daala_decode_free,
+ .decode = daala_decode_frame,
+};
diff --git a/libavcodec/daaladsp.c b/libavcodec/daaladsp.c
new file mode 100644
index 0000000..93535d2
--- /dev/null
+++ b/libavcodec/daaladsp.c
@@ -0,0 +1,1890 @@
+/*
+ * Daala video decoder
+ *
+ * Copyright (C) 2015-2016 Rostislav Pehlivanov <atomnuker at gmail.com>
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * This file incorporates work covered by the following copyright and
+ * permission notice:
+ *
+ * Copyright 2001-2015 Xiph.Org and contributors.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+ * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "libavutil/intreadwrite.h"
+#include "daaladsp.h"
+#include "daalatab.h"
+#include "daala_utils.h"
+
+#define TEMPLATE_8bit
+
+#if defined(TEMPLATE_8bit)
+
+# define RENAME(N) N ## _8bit
+# define READ(P) AV_RN32(P)
+# define WRITE(P, V) AV_WN32(P, V)
+# define pixel int32_t
+# undef TEMPLATE_8bit
+
+#endif
+
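+/* Rounding arithmetic shift used throughout the transforms: for (in-range)
+ * negative inputs this adds (1 << _b) - 1 before the shift, so the division
+ * by 2^_b rounds towards zero rather than towards negative infinity;
+ * non-negative inputs are unaffected. */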
+#define DAALA_DCT_RSHIFT(_a, _b) (((pixel)(((uint32_t)(_a) >> (32 - (_b))) + (_a))) >> (_b))
+
+static void RENAME(daala_haar_iwt)(uint8_t *_dst, const int dst_stride,
+ const uint8_t *_src, const int src_stride,
+ const int ln)
+{
+ int i, j, p, lvl;
+ pixel *dst = (pixel *)_dst;
+ pixel *src = (pixel *)_src;
+ dst[0] = src[0];
+ for (lvl = ln - 1; lvl > -1; lvl--) {
+ p = 1 << (ln - 1 - lvl);
+ for (i = p - 1; i > -1; i--) {
+ for (j = p - 1; j > -1; j--) {
+ const pixel a = src[(i + 0)*src_stride + j + p];
+ const pixel b = src[(i + p)*src_stride + j + 0];
+ const pixel c = src[(i + p)*src_stride + j + p] - a;
+ const pixel d = dst[(i + 0)*dst_stride + j + 0] + b;
+ const pixel e = (d - c) >> 1;
+ const pixel f = e - a;
+ const pixel g = e - b;
+ dst[(2*i + 1)*dst_stride + 2*j + 0] = f;
+ dst[(2*i + 0)*dst_stride + 2*j + 1] = g;
+ dst[(2*i + 0)*dst_stride + 2*j + 0] = d - f;
+ dst[(2*i + 1)*dst_stride + 2*j + 1] = c + g;
+ }
+ }
+ }
+}
+
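+/* The 1-D kernels below invert Daala's lifting-based transforms: each plane
+ * rotation is factored into three shear (lifting) steps, annotated with their
+ * Tan/Sin constants in the larger kernels, and the inverse runs the forward
+ * steps in reverse order with opposite signs. Every multiply uses a
+ * fixed-point constant with a rounding offset of half the divisor. */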
+static av_always_inline void idct_1D_4(pixel *x, int xstride, pixel y[4])
+{
+ pixel tmp;
+ y[3] += (y[1]*18293 + 8192) >> 14;
+ y[1] -= (y[3]*21407 + 16384) >> 15;
+ y[3] += (y[1]*23013 + 16384) >> 15;
+ y[2] = y[0] - y[2];
+ tmp = DAALA_DCT_RSHIFT(y[2], 1);
+ y[0] -= tmp - DAALA_DCT_RSHIFT(y[3], 1);
+ y[1] = tmp - y[1];
+ WRITE(x + 0*xstride, y[0] );
+ WRITE(x + 1*xstride, y[2] - y[1]);
+ WRITE(x + 2*xstride, y[1] );
+ WRITE(x + 3*xstride, y[0] - y[3]);
+}
+
+static av_always_inline void idct_1D_8(pixel *x, int xstride, pixel y[8])
+{
+ pixel tmp[3];
+ y[5] -= (y[3]*2485 + 4096) >> 13;
+ y[3] += (y[5]*18205 + 16384) >> 15;
+ y[5] -= (y[3]*2485 + 4096) >> 13;
+ y[7] -= (y[1]*3227 + 16384) >> 15;
+ y[1] += (y[7]*6393 + 16384) >> 15;
+ y[7] -= (y[1]*3227 + 16384) >> 15;
+ y[1] += y[3];
+ tmp[0] = DAALA_DCT_RSHIFT(y[1], 1);
+ y[3] = tmp[0] - y[3];
+ y[5] += y[7];
+ y[7] = DAALA_DCT_RSHIFT(y[5], 1) - y[7];
+ y[3] += (y[5]*7489 + 4096) >> 13;
+ y[5] -= (y[3]*11585 + 8192) >> 14;
+ y[3] -= (y[5]*19195 + 16384) >> 15;
+ y[6] += (y[2]*21895 + 16384) >> 15;
+ y[2] -= (y[6]*15137 + 8192) >> 14;
+ y[6] += (y[2]*21895 + 16384) >> 15;
+ y[0] += (y[4]*13573 + 16384) >> 15;
+ y[4] -= (y[0]*11585 + 8192) >> 14;
+ y[0] += (y[4]*13573 + 16384) >> 15;
+ y[4] = y[2] - y[4];
+ tmp[1] = DAALA_DCT_RSHIFT(y[4], 1);
+ y[2] = tmp[1] - y[2];
+ y[6] = y[0] - y[6];
+ tmp[2] = DAALA_DCT_RSHIFT(y[6], 1);
+ y[0] -= tmp[2];
+ y[7] = tmp[2] - y[7];
+ y[6] -= y[7];
+ y[2] += DAALA_DCT_RSHIFT(y[3], 1);
+ y[3] = y[2] - y[3];
+ y[5] += tmp[1];
+ y[4] -= y[5];
+ y[0] += tmp[0];
+ y[1] = y[0] - y[1];
+ WRITE(x + 0*xstride, y[0]);
+ WRITE(x + 1*xstride, y[4]);
+ WRITE(x + 2*xstride, y[2]);
+ WRITE(x + 3*xstride, y[6]);
+ WRITE(x + 4*xstride, y[7]);
+ WRITE(x + 5*xstride, y[3]);
+ WRITE(x + 6*xstride, y[5]);
+ WRITE(x + 7*xstride, y[1]);
+}
+
+static av_always_inline void idct_1D_16(pixel *x, int xstride, pixel y[16])
+{
+ pixel tmp[8];
+ y[1] += (y[15]*13573 + 16384) >> 15;
+ y[15] -= (y[1]*11585 + 8192) >> 14;
+ y[1] += ((y[15]*13573 + 16384) >> 15) + y[7];
+ y[13] -= (y[3]*10947 + 8192) >> 14;
+ y[3] += (y[13]*15137 + 8192) >> 14;
+ y[5] += (y[11]*10947 + 8192) >> 14;
+ y[11] -= (y[5]*15137 + 8192) >> 14;
+ y[5] += (y[11]*10947 + 8192) >> 14;
+ y[13] += y[5] - ((y[3]*21895 + 16384) >> 15);
+ y[15] = y[9] - y[15];
+ y[11] += y[3];
+ tmp[0] = DAALA_DCT_RSHIFT(y[15], 1);
+ y[9] -= tmp[0];
+ tmp[1] = DAALA_DCT_RSHIFT(y[11], 1);
+ y[3] += tmp[0] - tmp[1];
+ tmp[2] = DAALA_DCT_RSHIFT(y[1], 1);
+ y[7] = tmp[2] - y[7] + tmp[1];
+ tmp[3] = DAALA_DCT_RSHIFT(y[13], 1);
+ y[5] += tmp[2] - tmp[3];
+ y[9] = tmp[3] - y[9];
+ y[13] -= y[9];
+ y[15] = y[3] - y[15];
+ y[1] -= y[5] + ((y[15]*20055 + 16384) >> 15);
+ y[15] += (y[1]*23059 + 8192) >> 14;
+ y[1] -= (y[15]*5417 + 4096) >> 13;
+ y[11] = y[7] - y[11];
+ y[9] += (y[7]*14101 + 8192) >> 14;
+ y[7] += (y[9]*3363 + 4096) >> 13;
+ y[9] -= (y[7]*12905 + 8192) >> 14;
+ y[11] -= (y[13]*4379 + 8192) >> 14;
+ y[13] += (y[11]*20435 + 8192) >> 14;
+ y[11] -= (y[13]*17515 + 16384) >> 15;
+ y[3] += (y[5]*851 + 4096) >> 13;
+ y[5] += (y[3]*14699 + 8192) >> 14;
+ y[3] -= (y[5]*1035 + 1024) >> 11;
+ y[6] -= (y[10]*7335 + 16384) >> 15;
+ y[10] -= (y[6]*12873 + 8192) >> 14;
+ y[14] += (y[2]*2873 + 1024) >> 11;
+ y[2] += (y[14]*9041 + 16384) >> 15;
+ y[6] = DAALA_DCT_RSHIFT(y[2], 1) - y[6] - ((y[10]*8593 + 8192) >> 14);
+ y[14] = DAALA_DCT_RSHIFT(y[10], 1) - y[14] + ((y[2]*2275 + 1024) >> 11);
+ y[2] -= y[6];
+ y[10] -= y[14];
+ y[6] -= (y[10]*13573 + 16384) >> 15;
+ y[10] += (y[6]*11585 + 8192) >> 14;
+ y[6] -= (y[10]*13573 + 16384) >> 15;
+ y[12] += (y[4]*9147 + 4096) >> 13;
+ y[4] -= (y[12]*10703 + 8192) >> 14;
+ y[12] += (y[4]*23013 + 16384) >> 15;
+ y[8] = y[0] - y[8];
+ tmp[4] = DAALA_DCT_RSHIFT(y[8], 1);
+ y[0] -= tmp[4] - DAALA_DCT_RSHIFT(y[12], 1);
+ y[4] = tmp[4] - y[4];
+ y[8] += y[6] - y[4];
+ y[12] = y[0] - y[12] + y[14];
+ y[10] = y[4] - y[10];
+ y[2] = y[0] - y[2];
+ tmp[5] = DAALA_DCT_RSHIFT(y[12], 1);
+ y[14] = tmp[5] - y[14];
+ tmp[6] = DAALA_DCT_RSHIFT(y[10], 1);
+ y[4] -= tmp[6];
+ tmp[4] = DAALA_DCT_RSHIFT(y[8], 1);
+ y[6] = tmp[4] - y[6];
+ tmp[7] = DAALA_DCT_RSHIFT(y[2], 1);
+ y[0] -= tmp[7];
+ y[3] = tmp[7] - y[3];
+ y[6] += DAALA_DCT_RSHIFT(y[13], 1);
+ y[9] = tmp[6] - y[9];
+ y[14] += DAALA_DCT_RSHIFT(y[15], 1);
+ y[1] = tmp[5] - y[1];
+ y[4] += DAALA_DCT_RSHIFT(y[7], 1);
+ y[11] = tmp[4] - y[11];
+ y[0] += DAALA_DCT_RSHIFT(y[5], 1);
+ WRITE(x + 0*xstride, y[0] );
+ WRITE(x + 1*xstride, y[8] - y[11]);
+ WRITE(x + 2*xstride, y[4] );
+ WRITE(x + 3*xstride, y[12] - y[1]);
+ WRITE(x + 4*xstride, y[14] );
+ WRITE(x + 5*xstride, y[10] - y[9]);
+ WRITE(x + 6*xstride, y[6] );
+ WRITE(x + 7*xstride, y[2] - y[3]);
+ WRITE(x + 8*xstride, y[3] );
+ WRITE(x + 9*xstride, y[6] - y[13]);
+ WRITE(x + 10*xstride, y[9] );
+ WRITE(x + 11*xstride, y[14] - y[15]);
+ WRITE(x + 12*xstride, y[1] );
+ WRITE(x + 13*xstride, y[4] - y[7]);
+ WRITE(x + 14*xstride, y[11] );
+ WRITE(x + 15*xstride, y[0] - y[5]);
+}
+
+static av_always_inline void idct_1D_32(pixel *x, int xstride, pixel y[32])
+{
+ pixel tmp[25];
+ /* 8247/16384 ~= Tan[19*Pi/128] ~= 0.503357699799294 */
+ y[19] += (y[13]*8247 + 8192) >> 14;
+ /* 1645/2048 ~= Sin[19*Pi/64] ~= 0.803207531480645 */
+ y[13] -= (y[19]*1645 + 1024) >> 11;
+ /* 8247/16384 ~= Tan[19*Pi/128] ~= 0.503357699799294 */
+ y[19] += (y[13]*8247 + 8192) >> 14;
+ /* 10375/16384 ~= Tan[23*Pi/128] ~= 0.633243016177569 */
+ y[23] += (y[9]*10375 + 8192) >> 14;
+ /* 7405/8192 ~= Sin[23*Pi/64] ~= 0.903989293123443 */
+ y[9] -= (y[23]*7405 + 4096) >> 13;
+ /* 10375/16384 ~= Tan[23*Pi/128] ~= 0.633243016177569 */
+ y[23] += (y[9]*10375 + 8192) >> 14;
+ /* 11539/16384 ~= Tan[25*Pi/128] ~= 0.704279460865044 */
+ y[25] += (y[7]*11539 + 8192) >> 14;
+ /* 7713/8192 ~= Sin[25*Pi/64] ~= 0.941544065183021 */
+ y[7] -= (y[25]*7713 + 4096) >> 13;
+ /* 11539/16384 ~= Tan[25*Pi/128] ~= 0.704279460865044 */
+ y[25] += (y[7]*11539 + 8192) >> 14;
+ /* 9281/16384 ~= Tan[21*Pi/128] ~= 0.566493002730344 */
+ y[21] += (y[11]*9281 + 8192) >> 14;
+ /* 7027/8192 ~= Sin[21*Pi/64] ~= 0.857728610000272 */
+ y[11] -= (y[21]*7027 + 4096) >> 13;
+ /* 9281/16384 ~= Tan[21*Pi/128] ~= 0.566493002730344 */
+ y[21] += (y[11]*9281 + 8192) >> 14;
+ /* 6393/8192 ~= Tan[27*Pi/128] ~= 0.780407659653944 */
+ y[27] += (y[5]*6393 + 4096) >> 13;
+ /* 3973/4096 ~= Sin[27*Pi/64] ~= 0.970031253194544 */
+ y[5] -= (y[27]*3973 + 2048) >> 12;
+ /* 6393/8192 ~= Tan[27*Pi/128] ~= 0.780407659653944 */
+ y[27] += (y[5]*6393 + 4096) >> 13;
+ /* 7263/16384 ~= Tan[17*Pi/128] ~= 0.443269513890864 */
+ y[17] += (y[15]*7263 + 8192) >> 14;
+ /* 3035/4096 ~= Sin[17*Pi/64] ~= 0.740951125354959 */
+ y[15] -= (y[17]*3035 + 2048) >> 12;
+ /* 14525/32768 ~= Tan[17*Pi/128] ~= 0.443269513890864 */
+ y[17] += (y[15]*14525 + 16384) >> 15;
+ /* 2417/32768 ~= Tan[3*Pi/128] ~= 0.0737644315224493 */
+ y[29] -= (y[3]*2417 + 16384) >> 15;
+ /* 601/4096 ~= Sin[3*Pi/64] ~= 0.146730474455362 */
+ y[3] += (y[29]*601 + 2048) >> 12;
+ /* 2417/32768 ~= Tan[3*Pi/128] ~= 0.0737644315224493 */
+ y[29] -= (y[3]*2417 + 16384) >> 15;
+ /* 7799/8192 ~= Tan[31*Pi/128] ~= 0.952079146700925 */
+ y[31] += (y[1]*7799 + 4096) >> 13;
+ /* 4091/4096 ~= Sin[31*Pi/64] ~= 0.998795456205172 */
+ y[1] -= (y[31]*4091 + 2048) >> 12;
+ /* 7799/8192 ~= Tan[31*Pi/128] ~= 0.952079146700925 */
+ y[31] += (y[1]*7799 + 4096) >> 13;
+ y[15] = -y[15];
+ y[7] = -y[7];
+ y[11] = -y[11];
+ y[13] = -y[13];
+ y[5] = -y[5];
+ y[9] = -y[9];
+ y[1] = -y[1];
+ y[27] += y[21];
+ tmp[11] = DAALA_DCT_RSHIFT(y[27], 1);
+ y[21] = tmp[11] - y[21];
+ y[5] -= y[11];
+ tmp[8] = DAALA_DCT_RSHIFT(y[5], 1);
+ y[11] += tmp[8];
+ y[25] += y[23];
+ tmp[7] = DAALA_DCT_RSHIFT(y[25], 1);
+ y[23] -= tmp[7];
+ y[7] -= y[9];
+ tmp[12] = DAALA_DCT_RSHIFT(y[7], 1);
+ y[9] += tmp[12];
+ y[13] += y[29];
+ tmp[9] = DAALA_DCT_RSHIFT(y[13], 1);
+ y[29] -= tmp[9];
+ y[19] -= y[3];
+ tmp[10] = DAALA_DCT_RSHIFT(y[19], 1);
+ y[3] += tmp[10];
+ y[15] += y[1];
+ tmp[13] = DAALA_DCT_RSHIFT(y[15], 1);
+ y[1] -= tmp[13];
+ y[17] -= y[31];
+ tmp[16] = DAALA_DCT_RSHIFT(y[17], 1);
+ y[31] += tmp[16];
+ y[21] += tmp[9];
+ y[13] = y[21] - y[13];
+ y[11] -= tmp[10];
+ y[19] += y[11];
+ y[9] = tmp[16] - y[9];
+ y[17] = y[9] - y[17];
+ y[23] = tmp[13] - y[23];
+ y[15] -= y[23];
+ y[3] += tmp[11];
+ y[27] -= y[3];
+ y[29] -= tmp[8];
+ y[5] += y[29];
+ y[31] += tmp[7];
+ y[25] -= y[31];
+ y[1] -= tmp[12];
+ y[7] += y[1];
+ y[21] = -y[21];
+ /* 6723/8192 ~= Tan[7*Pi/32] ~= 0.820678790828660 */
+ y[9] += (y[23]*6723 + 4096) >> 13;
+ /* 16069/16384 ~= Sin[7*Pi/16] ~= 0.980785280403230 */
+ y[23] -= (y[9]*16069 + 8192) >> 14;
+ /* 6723/8192 ~= Tan[7*Pi/32] ~= 0.820678790828660 */
+ y[9] += (y[23]*6723 + 4096) >> 13;
+ /* 2485/8192 ~= Tan[3*Pi/32] ~= 0.303346683607342 */
+ y[11] -= (y[21]*2485 + 4096) >> 13;
+ /* 18205/32768 ~= Sin[3*Pi/16] ~= 0.555570233019602 */
+ y[21] += (y[11]*18205 + 16384) >> 15;
+ /* 2485/8192 ~= Tan[3*Pi/32] ~= 0.303346683607342 */
+ y[11] -= (y[21]*2485 + 4096) >> 13;
+ y[23] += y[11];
+ tmp[15] = DAALA_DCT_RSHIFT(y[23], 1);
+ y[11] -= tmp[15];
+ y[21] = y[9] - y[21];
+ y[9] -= DAALA_DCT_RSHIFT(y[21], 1);
+ /* 13573/16384 ~= 2*Tan[Pi/8] ~= 0.828427124746190 */
+ y[21] -= (y[11]*13573 + 8192) >> 14;
+ /* 11585/32768 ~= Sin[Pi/4]/2 ~= 0.353553390593274 */
+ y[11] += (y[21]*11585 + 16384) >> 15;
+ /* 13573/16384 ~= 2*Tan[Pi/8] ~= 0.828427124746190 */
+ y[21] -= (y[11]*13573 + 8192) >> 14;
+ /* 17515/32768 ~= Tan[5*Pi/32] ~= 0.534511135950792 */
+ y[13] -= (y[19]*17515 + 16384) >> 15;
+ /* 13623/16384 ~= Sin[5*Pi/16] ~= 0.831469612302545 */
+ y[19] += (y[13]*13623 + 8192) >> 14;
+ /* 17515/32768 ~= Tan[5*Pi/32] ~= 0.534511135950792 */
+ y[13] -= (y[19]*17515 + 16384) >> 15;
+ /* 6723/8192 ~= Tan[7*Pi/32] ~= 0.820678790828660 */
+ y[15] -= (y[17]*6723 + 4096) >> 13;
+ /* 16069/16384 ~= Sin[7*Pi/16] ~= 0.980785280403230 */
+ y[17] += (y[15]*16069 + 8192) >> 14;
+ /* 6723/8192 ~= Tan[7*Pi/32] ~= 0.820678790828660 */
+ y[15] -= (y[17]*6723 + 4096) >> 13;
+ y[17] += y[19];
+ tmp[16] = DAALA_DCT_RSHIFT(y[17], 1);
+ y[19] = tmp[16] - y[19];
+ y[13] += y[15];
+ y[15] -= DAALA_DCT_RSHIFT(y[13], 1);
+ /* -19195/32768 ~= Tan[Pi/8] - Tan[Pi/4] ~= -0.585786437626905 */
+ y[13] -= (y[19]*19195 + 16384) >> 15;
+ /* 11585/16384 ~= Sin[Pi/4] ~= 0.707106781186548 */
+ y[19] -= (y[13]*11585 + 8192) >> 14;
+ /* 7489/8192 ~= Tan[Pi/8] + Tan[Pi/4]/2 ~= 0.914213562373095 */
+ y[13] += (y[19]*7489 + 4096) >> 13;
+ y[5] = y[25] - y[5];
+ y[25] = DAALA_DCT_RSHIFT(y[5], 1) - y[25];
+ y[7] += y[27];
+ y[27] = DAALA_DCT_RSHIFT(y[7], 1) - y[27];
+ y[7] = -y[7];
+ y[29] += y[1];
+ y[1] = DAALA_DCT_RSHIFT(y[29], 1) - y[1];
+ y[31] += y[3];
+ tmp[14] = DAALA_DCT_RSHIFT(y[31], 1);
+ y[3] = tmp[14] - y[3];
+ /* 4161/16384 ~= Tan[3*Pi/16] - Tan[Pi/8] ~= 0.253965075546204 */
+ y[7] += (y[25]*4161 + 8192) >> 14;
+ /* 15137/16384 ~= Sin[3*Pi/8] ~= 0.923879532511287 */
+ y[25] -= (y[7]*15137 + 8192) >> 14;
+ /* 14341/16384 ~= Tan[3*Pi/16] + Tan[Pi/8]/2 ~= 0.875285419105846 */
+ y[7] += (y[25]*14341 + 8192) >> 14;
+ /* 14341/16384 ~= Tan[3*Pi/16] + Tan[Pi/8]/2 ~= 0.875285419105846 */
+ y[27] -= (y[5]*14341 + 8192) >> 14;
+ /* 15137/16384 ~= Sin[3*Pi/8] ~= 0.923879532511287 */
+ y[5] += (y[27]*15137 + 8192) >> 14;
+ /* 4161/16384 ~= Tan[3*Pi/16] - Tan[Pi/8] ~= 0.253965075546204 */
+ y[27] -= (y[5]*4161 + 8192) >> 14;
+ /* 13573/16384 ~= 2*Tan[Pi/8] ~= 0.828427124746190 */
+ y[29] += (y[3]*13573 + 8192) >> 14;
+ /* 11585/32768 ~= Sin[Pi/4]/2 ~= 0.353553390593274 */
+ y[3] -= (y[29]*11585 + 16384) >> 15;
+ /* 13573/16384 ~= 2*Tan[Pi/8] ~= 0.828427124746190 */
+ y[29] += (y[3]*13573 + 8192) >> 14;
+ y[15] = -y[15];
+ y[11] = -y[11];
+ y[13] = -y[13];
+ y[5] = -y[5];
+ y[9] = -y[9];
+ /* 11725/32768 ~= Tan[7*Pi/64] ~= 0.357805721314524 */
+ y[14] += (y[18]*11725 + 16384) >> 15;
+ /* 5197/8192 ~= Sin[7*Pi/32] ~= 0.634393284163645 */
+ y[18] -= (y[14]*5197 + 4096) >> 13;
+ /* 11725/32768 ~= Tan[7*Pi/64] ~= 0.357805721314524 */
+ y[14] += (y[18]*11725 + 16384) >> 15;
+ /* 2455/4096 ~= Tan[11*Pi/64] ~= 0.599376933681924 */
+ y[22] += (y[10]*2455 + 2048) >> 12;
+ /* 7225/8192 ~= Sin[11*Pi/32] ~= 0.881921264348355 */
+ y[10] -= (y[22]*7225 + 4096) >> 13;
+ /* 2455/4096 ~= Tan[11*Pi/64] ~= 0.599376933681924 */
+ y[22] += (y[10]*2455 + 2048) >> 12;
+ /* 4861/32768 ~= Tan[3*Pi/64] ~= 0.148335987538347 */
+ y[6] += (y[26]*4861 + 16384) >> 15;
+ /* 1189/4096 ~= Sin[3*Pi/32] ~= 0.290284677254462 */
+ y[26] -= (y[6]*1189 + 2048) >> 12;
+ /* 4861/32768 ~= Tan[3*Pi/64] ~= 0.148335987538347 */
+ y[6] += (y[26]*4861 + 16384) >> 15;
+ /* 7425/8192 ~= Tan[15*Pi/64] ~= 0.906347169019147 */
+ y[30] += (y[2]*7425 + 4096) >> 13;
+ /* 8153/8192 ~= Sin[15*Pi/32] ~= 0.995184726672197 */
+ y[2] -= (y[30]*8153 + 4096) >> 13;
+ /* 7425/8192 ~= Tan[15*Pi/64] ~= 0.906347169019147 */
+ y[30] += (y[2]*7425 + 4096) >> 13;
+
+ y[2] = -y[2];
+ y[2] -= y[18];
+ tmp[0] = DAALA_DCT_RSHIFT(y[2], 1);
+ y[18] += tmp[0];
+ y[22] -= y[6];
+ tmp[2] = DAALA_DCT_RSHIFT(y[22], 1);
+ y[6] += tmp[2];
+ y[30] += y[14];
+ tmp[3] = DAALA_DCT_RSHIFT(y[30], 1);
+ y[14] -= tmp[3];
+ y[10] = y[26] - y[10];
+ tmp[1] = DAALA_DCT_RSHIFT(y[10], 1);
+ y[26] -= tmp[1];
+ y[14] += tmp[1];
+ y[10] = y[14] - y[10];
+ y[6] -= tmp[3];
+ y[30] += y[6];
+ y[18] += tmp[2];
+ y[22] = y[18] - y[22];
+ y[26] += tmp[0];
+ y[2] -= y[26];
+ /* 3259/16384 ~= Tan[Pi/16] ~= 0.198912367379658 */
+ y[14] += (y[18]*3259 + 8192) >> 14;
+ /* 3135/8192 ~= Sin[Pi/8] ~= 0.382683432365090 */
+ y[18] -= (y[14]*3135 + 4096) >> 13;
+ /* 3259/16384 ~= Tan[Pi/16] ~= 0.198912367379658 */
+ y[14] += (y[18]*3259 + 8192) >> 14;
+ /* 10947/16384 ~= Tan[3*Pi/16] ~= 0.668178637919299 */
+ y[10] += (y[22]*10947 + 8192) >> 14;
+ /* 15137/16384 ~= Sin[3*Pi/8] ~= 0.923879532511287 */
+ y[22] -= (y[10]*15137 + 8192) >> 14;
+ /* 21895/32768 ~= Tan[3*Pi/16] ~= 0.668178637919299 */
+ y[10] += (y[22]*21895 + 16384) >> 15;
+ /* 13573/32768 ~= Tan[Pi/8] ~= 0.414213562373095 */
+ y[6] += (y[26]*13573 + 16384) >> 15;
+ /* 11585/16384 ~= Sin[Pi/4] ~= 0.707106781186547 */
+ y[26] -= (y[6]*11585 + 8192) >> 14;
+ /* 13573/32768 ~= Tan[Pi/8] ~= 0.414213562373095 */
+ y[6] += (y[26]*13573 + 16384) >> 15;
+ /* 8757/16384 ~= Tan[5*Pi/32] ~= 0.534511135950792 */
+ y[12] -= (y[20]*8757 + 8192) >> 14;
+ /* 6811/8192 ~= Sin[5*Pi/16] ~= 0.831469612302545 */
+ y[20] += (y[12]*6811 + 4096) >> 13;
+ /* 8757/16384 ~= Tan[5*Pi/32] ~= 0.534511135950792 */
+ y[12] -= (y[20]*8757 + 8192) >> 14;
+ /* 6723/8192 ~= Tan[7*Pi/32] ~= 0.820678790828660 */
+ y[4] -= (y[28]*6723 + 4096) >> 13;
+ /* 8035/8192 ~= Sin[7*Pi/16] ~= 0.980785280403230 */
+ y[28] += (y[4]*8035 + 4096) >> 13;
+ /* 6723/8192 ~= Tan[7*Pi/32] ~= 0.820678790828660 */
+ y[4] -= (y[28]*6723 + 4096) >> 13;
+ y[28] += y[20];
+ tmp[4] = DAALA_DCT_RSHIFT(y[28], 1);
+ y[20] = tmp[4] - y[20];
+ y[12] += y[4];
+ y[4] -= DAALA_DCT_RSHIFT(y[12], 1);
+ /* -19195/32768 ~= Tan[Pi/8] - Tan[Pi/4] ~= -0.585786437626905 */
+ y[12] -= (y[20]*19195 + 16384) >> 15;
+ /* 11585/16384 ~= Sin[Pi/4] ~= 0.707106781186548 */
+ y[20] -= (y[12]*11585 + 8192) >> 14;
+ /* 7489/8192 ~= Tan[Pi/8] + Tan[Pi/4]/2 ~= 0.914213562373095 */
+ y[12] += (y[20]*7489 + 4096) >> 13;
+ /* 10947/16384 ~= Tan[3*Pi/16] ~= 0.668178637919299 */
+ y[24] += (y[8]*10947 + 8192) >> 14;
+ /* 473/512 ~= Sin[3*Pi/8] ~= 0.923879532511287 */
+ y[8] -= (y[24]*473 + 256) >> 9;
+ /* 10947/16384 ~= Tan[3*Pi/16] ~= 0.668178637919299 */
+ y[24] += (y[8]*10947 + 8192) >> 14;
+ /* 3393/8192 ~= Tan[pi/8] ~= 0.414213562373095 */
+ y[0] += (y[16]*3393 + 4096) >> 13;
+ /* 5793/8192 ~= Sin[pi/4] ~= 0.707106781186547 */
+ y[16] -= (y[0]*5793 + 4096) >> 13;
+ /* 13573/32768 ~= Tan[pi/8] ~= 0.414213562373095 */
+ y[0] += (y[16]*13573 + 16384) >> 15;
+ y[16] = y[8] - y[16];
+ tmp[6] = DAALA_DCT_RSHIFT(y[16], 1);
+ y[8] = tmp[6] - y[8];
+ y[24] = y[0] - y[24];
+ tmp[5] = DAALA_DCT_RSHIFT(y[24], 1);
+ y[0] -= tmp[5];
+ y[4] = tmp[5] - y[4];
+ y[24] -= y[4];
+ y[8] += DAALA_DCT_RSHIFT(y[20], 1);
+ y[20] = y[8] - y[20];
+ y[12] = tmp[6] - y[12];
+ y[16] -= y[12];
+ y[0] += tmp[4];
+ y[28] = y[0] - y[28];
+ y[16] -= y[14];
+ tmp[24] = DAALA_DCT_RSHIFT(y[16], 1);
+ y[14] += tmp[24];
+ y[18] = y[12] - y[18];
+ tmp[20] = DAALA_DCT_RSHIFT(y[18], 1);
+ y[12] -= tmp[20];
+ y[20] -= y[10];
+ tmp[22] = DAALA_DCT_RSHIFT(y[20], 1);
+ y[10] += tmp[22];
+ y[22] = y[8] - y[22];
+ tmp[18] = DAALA_DCT_RSHIFT(y[22], 1);
+ y[8] -= tmp[18];
+ y[24] -= y[6];
+ tmp[23] = DAALA_DCT_RSHIFT(y[24], 1);
+ y[6] += tmp[23];
+ y[26] = y[4] - y[26];
+ tmp[19] = DAALA_DCT_RSHIFT(y[26], 1);
+ y[4] -= tmp[19];
+ y[28] -= y[2];
+ tmp[21] = DAALA_DCT_RSHIFT(y[28], 1);
+ y[2] += tmp[21];
+ y[30] = y[0] - y[30];
+ tmp[17] = DAALA_DCT_RSHIFT(y[30], 1);
+ y[0] -= tmp[17];
+ y[15] = tmp[24] - y[15];
+ y[16] -= y[15];
+ y[14] += tmp[16];
+ y[17] = y[14] - y[17];
+ y[13] = tmp[20] - y[13];
+ y[18] -= y[13];
+ y[12] += DAALA_DCT_RSHIFT(y[19], 1);
+ y[19] = y[12] - y[19];
+ y[11] = tmp[22] - y[11];
+ y[20] -= y[11];
+ y[10] += DAALA_DCT_RSHIFT(y[21], 1);
+ y[21] = y[10] - y[21];
+ y[9] = tmp[18] - y[9];
+ y[22] -= y[9];
+ y[8] += tmp[15];
+ y[23] = y[8] - y[23];
+ y[7] = tmp[23] - y[7];
+ y[24] -= y[7];
+ y[6] += DAALA_DCT_RSHIFT(y[25], 1);
+ y[25] = y[6] - y[25];
+ y[5] = tmp[19] - y[5];
+ y[26] -= y[5];
+ y[4] += DAALA_DCT_RSHIFT(y[27], 1);
+ y[27] = y[4] - y[27];
+ y[3] = tmp[21] - y[3];
+ y[28] -= y[3];
+ y[2] += DAALA_DCT_RSHIFT(y[29], 1);
+ y[29] = y[2] - y[29];
+ y[1] = tmp[17] - y[1];
+ y[30] -= y[1];
+ y[0] += tmp[14];
+ y[31] = y[0] - y[31];
+ WRITE(x + 0*xstride, y[0]);
+ WRITE(x + 1*xstride, y[16]);
+ WRITE(x + 2*xstride, y[8]);
+ WRITE(x + 3*xstride, y[24]);
+ WRITE(x + 4*xstride, y[4]);
+ WRITE(x + 5*xstride, y[20]);
+ WRITE(x + 6*xstride, y[12]);
+ WRITE(x + 7*xstride, y[28]);
+ WRITE(x + 8*xstride, y[2]);
+ WRITE(x + 9*xstride, y[18]);
+ WRITE(x + 10*xstride, y[10]);
+ WRITE(x + 11*xstride, y[26]);
+ WRITE(x + 12*xstride, y[6]);
+ WRITE(x + 13*xstride, y[22]);
+ WRITE(x + 14*xstride, y[14]);
+ WRITE(x + 15*xstride, y[30]);
+ WRITE(x + 16*xstride, y[1]);
+ WRITE(x + 17*xstride, y[17]);
+ WRITE(x + 18*xstride, y[9]);
+ WRITE(x + 19*xstride, y[25]);
+ WRITE(x + 20*xstride, y[5]);
+ WRITE(x + 21*xstride, y[21]);
+ WRITE(x + 22*xstride, y[13]);
+ WRITE(x + 23*xstride, y[29]);
+ WRITE(x + 24*xstride, y[3]);
+ WRITE(x + 25*xstride, y[19]);
+ WRITE(x + 26*xstride, y[11]);
+ WRITE(x + 27*xstride, y[27]);
+ WRITE(x + 28*xstride, y[7]);
+ WRITE(x + 29*xstride, y[23]);
+ WRITE(x + 30*xstride, y[15]);
+ WRITE(x + 31*xstride, y[31]);
+}
+
+static av_always_inline void idct_1D_64(pixel *x, int xstride, pixel y[64])
+{
+ pixel tmp[57];
+ /* 1301/2048 ~= (1/Sqrt[2] - Cos[61*Pi/128])/Sin[61*Pi/128] ~=
+ 0.6352634915376478 */
+ y[3] -= (y[61]*1301 + 1024) >> 11;
+ /* 5777/4096 ~= Sqrt[2]*Sin[61*Pi/128] ~= 1.4103816894602614 */
+ y[61] += (y[3]*5777 + 2048) >> 12;
+ /* 2753/4096 ~= (1/Sqrt[2] - Cos[61*Pi/128]/2)/Sin[61*Pi/128] ~=
+ 0.6721457072988726 */
+ y[3] -= (y[61]*2753 + 2048) >> 12;
+ /* -2513/32768 ~= (1/Sqrt[2] - Cos[29*Pi/128])/Sin[29*Pi/128] ~=
+ -0.07670567731102484 */
+ y[29] -= (y[35]*2513 + 16384) >> 15;
+ /* 7567/8192 ~= Sqrt[2]*Sin[29*Pi/128] ~= 0.9237258930790229 */
+ y[35] -= (y[29]*7567 + 4096) >> 13;
+ /* 515/1024 ~= (1/Sqrt[2] - Cos[29*Pi/128]/2)/Sin[29*Pi/128] ~=
+ 0.5029332763556925 */
+ y[29] += (y[35]*515 + 512) >> 10;
+ /* -6331/8192 ~= (1/Sqrt[2] - Cos[13*Pi/128])/Sin[13*Pi/128] ~=
+ -0.772825983107003 */
+ y[13] -= (y[51]*6331 + 4096) >> 13;
+ /* 1817/4096 ~= Sqrt[2]*Sin[13*Pi/128] ~= 0.4436129715409088 */
+ y[51] -= (y[13]*1817 + 2048) >> 12;
+ /* 1517/2048 ~= (1/Sqrt[2] - Cos[13*Pi/128]/2)/Sin[13*Pi/128] ~=
+ 0.7406956190518837 */
+ y[13] += (y[51]*1517 + 1024) >> 11;
+ /* -3391/8192 ~= (1/Sqrt[2] - Cos[19*Pi/128])/Sin[19*Pi/128] ~=
+ -0.41395202418930155 */
+ y[45] += (y[19]*3391 + 4096) >> 13;
+ /* 5209/8192 ~= Sqrt[2]*Sin[19*Pi/128] ~= 0.6358464401941452 */
+ y[19] += (y[45]*5209 + 4096) >> 13;
+ /* 2373/4096 ~= (1/Sqrt[2] - Cos[19*Pi/128]/2)/Sin[19*Pi/128] ~=
+ 0.5793773719823809 */
+ y[45] -= (y[19]*2373 + 2048) >> 12;
+ /* 8195/16384 ~= (1/Sqrt[2] - Cos[55*Pi/128])/Sin[55*Pi/128] ~=
+ 0.5001583229201391 */
+ y[9] -= (y[55]*8195 + 8192) >> 14;
+ /* 1413/1024 ~= Sqrt[2]*Sin[55*Pi/128] ~= 1.3798511851368045 */
+ y[55] += (y[9]*1413 + 512) >> 10;
+ /* 5017/8192 ~= (1/Sqrt[2] - Cos[55*Pi/128]/2)/Sin[55*Pi/128] ~=
+ 0.6124370775787037 */
+ y[9] -= (y[55]*5017 + 4096) >> 13;
+ /* -8437/32768 ~= (1/Sqrt[2] - Cos[23*Pi/128])/Sin[23*Pi/128] ~=
+ -0.2574717698598901 */
+ y[23] -= (y[41]*8437 + 16384) >> 15;
+ /* 3099/4096 ~= Sqrt[2]*Sin[23*Pi/128] ~= 0.7566008898816587 */
+ y[41] -= (y[23]*3099 + 2048) >> 12;
+ /* 4359/8192 ~= (1/Sqrt[2] - Cos[23*Pi/128]/2)/Sin[23*Pi/128] ~=
+ 0.5321145141202145 */
+ y[23] += (y[41]*4359 + 4096) >> 13;
+ /* -6287/32768 ~= (1/Sqrt[2] - Cos[25*Pi/128])/Sin[25*Pi/128] ~=
+ -0.19186603041023065 */
+ y[25] -= (y[39]*6287 + 16384) >> 15;
+ /* 6671/8192 ~= Sqrt[2]*Sin[25*Pi/128] ~= 0.8143157536286402 */
+ y[39] -= (y[25]*6671 + 4096) >> 13;
+ /* 1061/2048 ~= (1/Sqrt[2] - Cos[25*Pi/128]/2)/Sin[25*Pi/128] ~=
+ 0.5180794213368158 */
+ y[25] += (y[39]*1061 + 1024) >> 11;
+ /* 2229/4096 ~= (1/Sqrt[2] - Cos[57*Pi/128])/Sin[57*Pi/128] ~=
+ 0.5441561539205226 */
+ y[7] -= (y[57]*2229 + 2048) >> 12;
+ /* 5707/4096 ~= Sqrt[2]*Sin[57*Pi/128] ~= 1.3933930045694292 */
+ y[57] += (y[7]*5707 + 2048) >> 12;
+ /* 323/512 ~= (1/Sqrt[2] - Cos[57*Pi/128]/2)/Sin[57*Pi/128] ~=
+ 0.6309143839894504 */
+ y[7] -= (y[57]*323 + 256) >> 9;
+ /* -1971/2048 ~= (1/Sqrt[2] - Cos[11*Pi/128])/Sin[11*Pi/128] ~=
+ -0.9623434853244648 */
+ y[53] += (y[11]*1971 + 1024) >> 11;
+ /* 1545/4096 ~= Sqrt[2]*Sin[11*Pi/128] ~= 0.37718879887892737 */
+ y[11] += (y[53]*1545 + 2048) >> 12;
+ /* 3459/4096 ~= (1/Sqrt[2] - Cos[11*Pi/128]/2)/Sin[11*Pi/128] ~=
+ 0.8444243553292501 */
+ y[53] -= (y[11]*3459 + 2048) >> 12;
+ /* -5417/16384 ~= (1/Sqrt[2] - Cos[21*Pi/128])/Sin[21*Pi/128] ~=
+ -0.3306569439519963 */
+ y[21] -= (y[43]*5417 + 8192) >> 14;
+ /* 2855/4096 ~= Sqrt[2]*Sin[21*Pi/128] ~= 0.6970633083205415 */
+ y[43] -= (y[21]*2855 + 2048) >> 12;
+ /* 2261/4096 ~= (1/Sqrt[2] - Cos[21*Pi/128]/2)/Sin[21*Pi/128] ~=
+ 0.5519664910950994 */
+ y[21] += (y[43]*2261 + 2048) >> 12;
+ /* -4327/32768 ~= (1/Sqrt[2] - Cos[27*Pi/128])/Sin[27*Pi/128] ~=
+ -0.13204726103773165 */
+ y[27] -= (y[37]*4327 + 16384) >> 15;
+ /* 891/1024 ~= Sqrt[2]*Sin[27*Pi/128] ~= 0.8700688593994939 */
+ y[37] -= (y[27]*891 + 512) >> 10;
+ /* 4167/8192 ~= (1/Sqrt[2] - Cos[27*Pi/128]/2)/Sin[27*Pi/128] ~=
+ 0.5086435289805458 */
+ y[27] += (y[37]*4167 + 4096) >> 13;
+ /* 2413/4096 ~= (1/Sqrt[2] - Cos[59*Pi/128])/Sin[59*Pi/128] ~=
+ 0.5891266122920528 */
+ y[5] -= (y[59]*2413 + 2048) >> 12;
+ /* 5749/4096 ~= Sqrt[2]*Sin[59*Pi/128] ~= 1.4035780182072333 */
+ y[59] += (y[5]*5749 + 2048) >> 12;
+ /* 5331/8192 ~= (1/Sqrt[2] - Cos[59*Pi/128]/2)/Sin[59*Pi/128] ~=
+ 0.6507957303604222 */
+ y[5] -= (y[59]*5331 + 4096) >> 13;
+ /* -2571/4096 ~= (1/Sqrt[2] - Cos[15*Pi/128])/Sin[15*Pi/128] ~=
+ -0.6276441593165217 */
+ y[49] += (y[15]*2571 + 2048) >> 12;
+ /* 8339/32768 ~= Sqrt[2]*Sin[15*Pi/128] ~= 0.5089684416985407 */
+ y[15] += (y[49]*8339 + 8192) >> 14;
+ /* 5477/8192 ~= (1/Sqrt[2] - Cos[15*Pi/128]/2)/Sin[15*Pi/128] ~=
+ 0.6685570995525147 */
+ y[49] -= (y[15]*5477 + 4096) >> 13;
+ /* -8373/16384 ~= (1/Sqrt[2] - Cos[17*Pi/128])/Sin[17*Pi/128] ~=
+ -0.5110608601827629 */
+ y[17] -= (y[47]*8373 + 8192) >> 14;
+ /* 4695/8192 ~= Sqrt[2]*Sin[17*Pi/128] ~= 0.5730977622997507 */
+ y[47] -= (y[17]*4695 + 4096) >> 13;
+ /* 2527/4096 ~= (1/Sqrt[2] - Cos[17*Pi/128]/2)/Sin[17*Pi/128] ~=
+ 0.6169210657818165 */
+ y[17] += (y[47]*2527 + 2048) >> 12;
+ /* -815/32768 ~= (1/Sqrt[2] - Cos[31*Pi/128])/Sin[31*Pi/128] ~=
+ -0.02485756913896231 */
+ y[31] -= (y[33]*815 + 16384) >> 15;
+ /* 1997/2048 ~= Sqrt[2]*Sin[31*Pi/128] ~= 0.9751575901732918 */
+ y[33] -= (y[31]*1997 + 1024) >> 11;
+ /* 8197/16384 ~= (1/Sqrt[2] - Cos[31*Pi/128]/2)/Sin[31*Pi/128] ~=
+ 0.5003088539809675 */
+ y[31] += (y[33]*8197 + 8192) >> 14;
+ /* 5593/8192 ~= (1/Sqrt[2] - Cos[63*Pi/128])/Sin[63*Pi/128] ~=
+ 0.6827711905810085 */
+ y[1] -= (y[63]*5593 + 4096) >> 13;
+ /* 5791/4096 ~= Sqrt[2]*Sin[63*Pi/128] ~= 1.413787627688534 */
+ y[63] += (y[1]*5791 + 2048) >> 12;
+ /* 2847/4096 ~= (1/Sqrt[2] - Cos[63*Pi/128]/2)/Sin[63*Pi/128] ~=
+ 0.6950455016354713 */
+ y[1] -= (y[63]*2847 + 2048) >> 12;
+ y[7] = -y[7];
+ y[3] = -y[3];
+ y[5] = -y[5];
+ y[9] = -y[9];
+ y[7] -= DAALA_DCT_RSHIFT(y[39], 1);
+ y[39] += y[7];
+ y[25] -= DAALA_DCT_RSHIFT(y[57], 1);
+ y[57] += y[25];
+ y[9] -= DAALA_DCT_RSHIFT(y[41], 1);
+ y[41] += y[9];
+ y[23] -= DAALA_DCT_RSHIFT(y[55], 1);
+ y[55] += y[23];
+ y[17] -= DAALA_DCT_RSHIFT(y[15], 1);
+ y[15] += y[17];
+ y[49] -= DAALA_DCT_RSHIFT(y[47], 1);
+ y[47] += y[49];
+ y[1] += DAALA_DCT_RSHIFT(y[33], 1);
+ y[33] -= y[1];
+ y[31] -= DAALA_DCT_RSHIFT(y[63], 1);
+ y[63] += y[31];
+ y[29] -= DAALA_DCT_RSHIFT(y[61], 1);
+ y[61] += y[29];
+ y[3] -= DAALA_DCT_RSHIFT(y[35], 1);
+ y[35] += y[3];
+ y[45] += DAALA_DCT_RSHIFT(y[51], 1);
+ y[51] -= y[45];
+ y[13] += DAALA_DCT_RSHIFT(y[19], 1);
+ y[19] -= y[13];
+ y[5] -= DAALA_DCT_RSHIFT(y[37], 1);
+ y[37] += y[5];
+ y[27] -= DAALA_DCT_RSHIFT(y[59], 1);
+ y[59] += y[27];
+ y[21] -= DAALA_DCT_RSHIFT(y[11], 1);
+ y[11] += y[21];
+ y[53] -= DAALA_DCT_RSHIFT(y[43], 1);
+ y[43] += y[53];
+ y[45] -= y[29];
+ y[29] += DAALA_DCT_RSHIFT(y[45], 1);
+ y[19] -= y[35];
+ y[35] += DAALA_DCT_RSHIFT(y[19], 1);
+ y[37] += y[21];
+ y[21] -= DAALA_DCT_RSHIFT(y[37], 1);
+ y[27] += y[43];
+ y[43] -= DAALA_DCT_RSHIFT(y[27], 1);
+ y[25] += y[41];
+ y[41] -= DAALA_DCT_RSHIFT(y[25], 1);
+ y[39] += y[23];
+ y[23] -= DAALA_DCT_RSHIFT(y[39], 1);
+ y[47] -= y[31];
+ y[31] += DAALA_DCT_RSHIFT(y[47], 1);
+ y[17] -= y[33];
+ y[33] += DAALA_DCT_RSHIFT(y[17], 1);
+ y[9] += y[7];
+ tmp[18] = DAALA_DCT_RSHIFT(y[9], 1);
+ y[7] -= tmp[18];
+ y[55] -= y[57];
+ tmp[23] = DAALA_DCT_RSHIFT(y[55], 1);
+ y[57] += tmp[23];
+ y[63] += y[15];
+ tmp[24] = DAALA_DCT_RSHIFT(y[63], 1);
+ y[15] -= tmp[24];
+ y[1] += y[49];
+ tmp[17] = DAALA_DCT_RSHIFT(y[1], 1);
+ y[49] -= tmp[17];
+ y[3] -= y[51];
+ tmp[21] = DAALA_DCT_RSHIFT(y[3], 1);
+ y[51] += tmp[21];
+ y[61] += y[13];
+ tmp[20] = DAALA_DCT_RSHIFT(y[61], 1);
+ y[13] -= tmp[20];
+ y[11] -= y[59];
+ tmp[22] = DAALA_DCT_RSHIFT(y[11], 1);
+ y[59] += tmp[22];
+ y[53] += y[5];
+ tmp[19] = DAALA_DCT_RSHIFT(y[53], 1);
+ y[5] -= tmp[19];
+ y[43] = -y[43];
+ y[41] = -y[41];
+ /* 4861/32768 ~= Tan[3*Pi/64] ~= 0.14833598753834742 */
+ y[35] -= (y[29]*4861 + 16384) >> 15;
+ /* 1189/4096 ~= Sin[3*Pi/32] ~= 0.29028467725446233 */
+ y[29] += (y[35]*1189 + 2048) >> 12;
+ /* 4861/32768 ~= Tan[3*Pi/64] ~= 0.14833598753834742 */
+ y[35] -= (y[29]*4861 + 16384) >> 15;
+ /* 513/2048 ~= Tan[5*Pi/64] ~= 0.25048696019130545 */
+ y[37] -= (y[27]*513 + 1024) >> 11;
+ /* 7723/16384 ~= Sin[5*Pi/32] ~= 0.47139673682599764 */
+ y[27] += (y[37]*7723 + 8192) >> 14;
+ /* 513/2048 ~= Tan[5*Pi/64] ~= 0.25048696019130545 */
+ y[37] -= (y[27]*513 + 1024) >> 11;
+ /* 11725/32768 ~= Tan[7*Pi/64] ~= 0.3578057213145241 */
+ y[39] -= (y[25]*11725 + 16384) >> 15;
+ /* 5197/8192 ~= Sin[7*Pi/32] ~= 0.6343932841636455 */
+ y[25] += (y[39]*5197 + 4096) >> 13;
+ /* 11725/32768 ~= Tan[7*Pi/64] ~= 0.3578057213145241 */
+ y[39] -= (y[25]*11725 + 16384) >> 15;
+ /* 805/16384 ~= Tan[Pi/64] ~= 0.04912684976946793 */
+ y[33] -= (y[31]*805 + 8192) >> 14;
+ /* 803/8192 ~= Sin[Pi/32] ~= 0.0980171403295606 */
+ y[31] += (y[33]*803 + 4096) >> 13;
+ /* 805/16384 ~= Tan[Pi/64] ~= 0.04912684976946793 */
+ y[33] -= (y[31]*805 + 8192) >> 14;
+ /* 4861/32768 ~= Tan[3*Pi/64] ~= 0.14833598753834742 */
+ y[45] -= (y[19]*4861 + 16384) >> 15;
+ /* 1189/4096 ~= Sin[3*Pi/32] ~= 0.29028467725446233 */
+ y[19] += (y[45]*1189 + 2048) >> 12;
+ /* 4861/32768 ~= Tan[3*Pi/64] ~= 0.14833598753834742 */
+ y[45] -= (y[19]*4861 + 16384) >> 15;
+ /* 2455/4096 ~= Tan[11*Pi/64] ~= 0.5993769336819237 */
+ y[43] -= (y[21]*2455 + 2048) >> 12;
+ /* 14449/16384 ~= Sin[11*Pi/32] ~= 0.881921264348355 */
+ y[21] += (y[43]*14449 + 8192) >> 14;
+ /* 2455/4096 ~= Tan[11*Pi/64] ~= 0.5993769336819237 */
+ y[43] -= (y[21]*2455 + 2048) >> 12;
+ /* 11725/32768 ~= Tan[7*Pi/64] ~= 0.3578057213145241 */
+ y[23] -= (y[41]*11725 + 16384) >> 15;
+ /* 5197/8192 ~= Sin[7*Pi/32] ~= 0.6343932841636455 */
+ y[41] += (y[23]*5197 + 4096) >> 13;
+ /* 11725/32768 ~= Tan[7*Pi/64] ~= 0.3578057213145241 */
+ y[23] -= (y[41]*11725 + 16384) >> 15;
+ /* 805/16384 ~= Tan[Pi/64] ~= 0.04912684976946793 */
+ y[47] -= (y[17]*805 + 8192) >> 14;
+ /* 803/8192 ~= Sin[Pi/32] ~= 0.0980171403295606 */
+ y[17] += (y[47]*803 + 4096) >> 13;
+ /* 805/16384 ~= Tan[Pi/64] ~= 0.04912684976946793 */
+ y[47] -= (y[17]*805 + 8192) >> 14;
+ y[21] = -y[21];
+ y[45] = -y[45];
+ y[29] += DAALA_DCT_RSHIFT(y[27], 1);
+ y[27] -= y[29];
+ y[35] -= DAALA_DCT_RSHIFT(y[37], 1);
+ y[37] += y[35];
+ y[31] += DAALA_DCT_RSHIFT(y[25], 1);
+ y[25] -= y[31];
+ y[33] -= DAALA_DCT_RSHIFT(y[39], 1);
+ y[39] += y[33];
+ y[43] -= DAALA_DCT_RSHIFT(y[19], 1);
+ y[19] += y[43];
+ y[21] += DAALA_DCT_RSHIFT(y[45], 1);
+ y[45] -= y[21];
+ y[23] += DAALA_DCT_RSHIFT(y[17], 1);
+ y[17] -= y[23];
+ y[41] += DAALA_DCT_RSHIFT(y[47], 1);
+ y[47] -= y[41];
+ y[59] -= tmp[20];
+ y[61] += y[59];
+ y[5] += tmp[21];
+ y[3] -= y[5];
+ y[7] -= tmp[17];
+ y[1] += y[7];
+ y[57] -= tmp[24];
+ y[63] += y[57];
+ y[51] += tmp[22];
+ y[11] -= y[51];
+ y[13] += tmp[19];
+ y[53] -= y[13];
+ y[49] += tmp[23];
+ y[55] -= y[49];
+ y[15] += tmp[18];
+ y[9] -= y[15];
+ y[53] = -y[53];
+ /* 2485/8192 ~= Tan[3*Pi/32] ~= 0.303346683607342 */
+ y[51] -= (y[13]*2485 + 4096) >> 13;
+ /* 18205/32768 ~= Sin[3*Pi/16] ~= 0.555570233019602 */
+ y[13] += (y[51]*18205 + 16384) >> 15;
+ /* 2485/8192 ~= Tan[3*Pi/32] ~= 0.303346683607342 */
+ y[51] -= (y[13]*2485 + 4096) >> 13;
+ /* 3227/32768 ~= Tan[Pi/32] ~= 0.09849140335716425 */
+ y[49] -= (y[15]*3227 + 16384) >> 15;
+ /* 6393/32768 ~= Sin[Pi/16] ~= 0.19509032201612825 */
+ y[15] += (y[49]*6393 + 16384) >> 15;
+ /* 3227/32768 ~= Tan[Pi/32] ~= 0.09849140335716425 */
+ y[49] -= (y[15]*3227 + 16384) >> 15;
+ /* 17515/32768 ~= Tan[5*Pi/32] ~= 0.5345111359507916 */
+ y[53] -= (y[11]*17515 + 16384) >> 15;
+ /* 13623/16384 ~= Sin[5*Pi/16] ~= 0.8314696123025452 */
+ y[11] += (y[53]*13623 + 8192) >> 14;
+ /* 17515/32768 ~= Tan[5*Pi/32] ~= 0.5345111359507916 */
+ y[53] -= (y[11]*17515 + 16384) >> 15;
+ /* 6723/8192 ~= Tan[7*Pi/32] ~= 0.8206787908286602 */
+ y[55] -= (y[9]*6723 + 4096) >> 13;
+ /* 16069/16384 ~= Sin[7*Pi/16] ~= 0.9807852804032304 */
+ y[9] += (y[55]*16069 + 8192) >> 14;
+ /* 6723/8192 ~= Tan[7*Pi/32] ~= 0.8206787908286602 */
+ y[55] -= (y[9]*6723 + 4096) >> 13;
+ y[55] = -y[55];
+ y[25] += y[37];
+ y[37] -= DAALA_DCT_RSHIFT(y[25], 1);
+ y[27] -= y[39];
+ y[39] += DAALA_DCT_RSHIFT(y[27], 1);
+ y[29] -= y[31];
+ y[31] += DAALA_DCT_RSHIFT(y[29], 1);
+ y[33] -= y[35];
+ y[35] += DAALA_DCT_RSHIFT(y[33], 1); /* pass */
+ y[23] -= y[21];
+ y[21] += DAALA_DCT_RSHIFT(y[23], 1);
+ y[43] += y[41];
+ y[41] -= DAALA_DCT_RSHIFT(y[43], 1);
+ y[19] += y[17];
+ y[17] -= DAALA_DCT_RSHIFT(y[19], 1);
+ y[47] -= y[45];
+ y[45] += DAALA_DCT_RSHIFT(y[47], 1); /* pass */
+ y[7] += y[59];
+ y[59] -= DAALA_DCT_RSHIFT(y[7], 1);
+ y[5] -= y[57];
+ y[57] += DAALA_DCT_RSHIFT(y[5], 1);
+ y[3] -= y[1];
+ y[1] += DAALA_DCT_RSHIFT(y[3], 1);
+ y[63] += y[61];
+ y[61] -= DAALA_DCT_RSHIFT(y[63], 1); /* pass */
+ y[13] -= y[15];
+ y[15] += DAALA_DCT_RSHIFT(y[13], 1); /* pass */
+ y[49] -= y[51];
+ y[51] += DAALA_DCT_RSHIFT(y[49], 1);
+ y[55] -= y[11];
+ y[11] += DAALA_DCT_RSHIFT(y[55], 1); /* pass */
+ y[53] -= y[9];
+ y[9] += DAALA_DCT_RSHIFT(y[53], 1);
+ y[31] = -y[31];
+ y[15] = -y[15];
+ y[7] = -y[7];
+ y[59] = -y[59];
+ y[61] = -y[61];
+ y[37] = -y[37];
+ y[57] = -y[57];
+ /* 14341/16384 ~= Tan[3*Pi/16] + Tan[Pi/8]/2 ~= 0.875285419105846 */
+ y[37] -= (y[27]*14341 + 8192) >> 14;
+ /* 15137/16384 ~= Sin[3*Pi/8] ~= 0.923879532511287 */
+ y[27] += (y[37]*15137 + 8192) >> 14;
+ /* 4161/16384 ~= Tan[3*Pi/16] - Tan[Pi/8] ~= 0.253965075546204 */
+ y[37] -= (y[27]*4161 + 8192) >> 14;
+ /* 4161/16384 ~= Tan[3*Pi/16] - Tan[Pi/8] ~= 0.253965075546204 */
+ y[25] -= (y[39]*4161 + 8192) >> 14;
+ /* 15137/16384 ~= Sin[3*Pi/8] ~= 0.923879532511287 */
+ y[39] += (y[25]*15137 + 8192) >> 14;
+ /* 28681/32768 ~= Tan[3*Pi/16] + Tan[Pi/8]/2 ~= 0.875285419105846 */
+ y[25] -= (y[39]*28681 + 16384) >> 15;
+ /* -19195/32768 ~= Tan[Pi/8] - Tan[Pi/4] ~= -0.585786437626905 */
+ y[29] += (y[35]*19195 + 16384) >> 15;
+ /* 11585/16384 ~= Sin[Pi/4] ~= 0.707106781186548 */
+ y[35] += (y[29]*11585 + 8192) >> 14;
+ /* 29957/32768 ~= Tan[Pi/8] + Tan[Pi/4]/2 ~= 0.914213562373095 */
+ y[29] -= (y[35]*29957 + 16384) >> 15;
+ /* 14341/16384 ~= Tan[3*Pi/16] + Tan[Pi/8]/2 ~= 0.875285419105846 */
+ y[41] -= (y[23]*14341 + 8192) >> 14;
+ /* 15137/16384 ~= Sin[3*Pi/8] ~= 0.923879532511287 */
+ y[23] += (y[41]*15137 + 8192) >> 14;
+ /* 4161/16384 ~= Tan[3*Pi/16] - Tan[Pi/8] ~= 0.253965075546204 */
+ y[41] -= (y[23]*4161 + 8192) >> 14;
+ /* 3259/8192 ~= 2*Tan[Pi/16] ~= 0.397824734759316 */
+ y[43] -= (y[21]*3259 + 4096) >> 13;
+ /* 3135/16384 ~= Sin[Pi/8]/2 ~= 0.1913417161825449 */
+ y[21] += (y[43]*3135 + 8192) >> 14;
+ /* 3259/8192 ~= 2*Tan[Pi/16] ~= 0.397824734759316 */
+ y[43] -= (y[21]*3259 + 4096) >> 13;
+ /* 7489/8192 ~= Tan[Pi/8] + Tan[Pi/4]/2 ~= 0.914213562373095 */
+ y[45] -= (y[19]*7489 + 4096) >> 13;
+ /* 11585/16384 ~= Sin[Pi/4] ~= 0.707106781186548 */
+ y[19] += (y[45]*11585 + 8192) >> 14;
+ /* -19195/32768 ~= Tan[Pi/8] - Tan[Pi/4] ~= -0.585786437626905 */
+ y[45] += (y[19]*19195 + 16384) >> 15;
+ /* 14341/16384 ~= Tan[3*Pi/16] + Tan[Pi/8]/2 ~= 0.875285419105846 */
+ y[57] -= (y[7]*14341 + 8192) >> 14;
+ /* 15137/16384 ~= Sin[3*Pi/8] ~= 0.923879532511287 */
+ y[7] += (y[57]*15137 + 8192) >> 14;
+ /* 4161/16384 ~= Tan[3*Pi/16] - Tan[Pi/8] ~= 0.253965075546204 */
+ y[57] -= (y[7]*4161 + 8192) >> 14;
+ /* 4161/16384 ~= Tan[3*Pi/16] - Tan[Pi/8] ~= 0.253965075546204 */
+ y[5] -= (y[59]*4161 + 8192) >> 14;
+ /* 15137/16384 ~= Sin[3*Pi/8] ~= 0.923879532511287 */
+ y[59] += (y[5]*15137 + 8192) >> 14;
+ /* 28681/32768 ~= Tan[3*Pi/16] + Tan[Pi/8]/2 ~= 0.875285419105846 */
+ y[5] -= (y[59]*28681 + 16384) >> 15;
+ /* -19195/32768 ~= Tan[Pi/8] - Tan[Pi/4] ~= -0.585786437626905 */
+ y[3] += (y[61]*19195 + 16384) >> 15;
+ /* 11585/16384 ~= Sin[Pi/4] ~= 0.707106781186548 */
+ y[61] += (y[3]*11585 + 8192) >> 14;
+ /* 29957/32768 ~= Tan[Pi/8] + Tan[Pi/4]/2 ~= 0.914213562373095 */
+ y[3] -= (y[61]*29957 + 16384) >> 15;
+ /* -19195/32768 ~= Tan[Pi/8] - Tan[Pi/4] ~= -0.585786437626905 */
+ y[13] += (y[51]*19195 + 16384) >> 15;
+ /* 11585/16384 ~= Sin[Pi/4] ~= 0.707106781186548 */
+ y[51] += (y[13]*11585 + 8192) >> 14;
+ /* 29957/32768 ~= Tan[Pi/8] + Tan[Pi/4]/2 ~= 0.914213562373095 */
+ y[13] -= (y[51]*29957 + 16384) >> 15;
+ /* 13573/16384 ~= 2*Tan[Pi/8] ~= 0.828427124746190 */
+ y[53] += (y[11]*13573 + 8192) >> 14;
+ /* 11585/32768 ~= Sin[Pi/4]/2 ~= 0.353553390593274 */
+ y[11] -= (y[53]*11585 + 16384) >> 15;
+ /* 13573/16384 ~= 2*Tan[Pi/8] ~= 0.828427124746190 */
+ y[53] += (y[11]*13573 + 8192) >> 14;
+ y[3] = -y[3];
+ /* 1651/32768 ~= (1/Sqrt[2] - Cos[15*Pi/64])/Sin[15*Pi/64] ~=
+ 0.05039668360333519 */
+ y[34] += (y[30]*1651 + 16384) >> 15;
+ /* 1945/2048 ~= Sqrt[2]*Sin[15*Pi/64] ~= 0.9497277818777543 */
+ y[30] += (y[34]*1945 + 1024) >> 11;
+ /* 2053/4096 ~= (1/Sqrt[2] - Cos[15*Pi/64]/2)/Sin[15*Pi/64] ~=
+ 0.5012683042634027 */
+ y[34] -= (y[30]*2053 + 2048) >> 12;
+ /* 4545/32768 ~= (1/Sqrt[2] - Cos[19*Pi/64])/Sin[19*Pi/64] ~=
+ 0.13870322715817154 */
+ y[38] += (y[26]*4545 + 16384) >> 15;
+ /* 4653/4096 ~= Sqrt[2]*Sin[19*Pi/64] ~= 1.1359069844201428 */
+ y[26] -= (y[38]*4653 + 2048) >> 12;
+ /* 2087/4096 ~= (1/Sqrt[2] - Cos[19*Pi/64]/2)/Sin[19*Pi/64] ~=
+ 0.5095285002941893 */
+ y[38] += (y[26]*2087 + 2048) >> 12;
+ /* 75/256 ~= (1/Sqrt[2] - Cos[11*Pi/64])/Sin[11*Pi/64] ~=
+ 0.2929800132658202 */
+ y[22] -= (y[42]*75 + 128) >> 8;
+ /* 1489/2048 ~= Sqrt[2]*Sin[11*Pi/64] ~= 0.72705107329128 */
+ y[42] -= (y[22]*1489 + 1024) >> 11;
+ /* 2217/4096 ~= (1/Sqrt[2] - Cos[11*Pi/64]/2)/Sin[11*Pi/64] ~=
+ 0.5412195895259334 */
+ y[22] += (y[42]*2217 + 2048) >> 12;
+ /* 5067/16384 ~= (1/Sqrt[2] - Cos[23*Pi/64])/Sin[23*Pi/64] ~=
+ 0.30924225528198984 */
+ y[18] -= (y[46]*5067 + 8192) >> 14;
+ /* 1309/1024 ~= Sqrt[2]*Sin[23*Pi/64] ~= 1.278433918575241 */
+ y[46] += (y[18]*1309 + 512) >> 10;
+ /* 4471/8192 ~= (1/Sqrt[2] - Cos[23*Pi/64]/2)/Sin[23*Pi/64] ~=
+ 0.5457246432276498 */
+ y[18] -= (y[46]*4471 + 4096) >> 13;
+ /* 5701/8192 ~= (1/Sqrt[2] - Cos[7*Pi/64])/Sin[7*Pi/64] ~=
+ 0.6958870433047222 */
+ y[14] -= (y[50]*5701 + 4096) >> 13;
+ /* 3903/8192 ~= Sqrt[2]*Sin[7*Pi/64] ~= 0.47643419969316125 */
+ y[50] -= (y[14]*3903 + 4096) >> 13;
+ /* 5747/8192 ~= (1/Sqrt[2] - Cos[7*Pi/64]/2)/Sin[7*Pi/64] ~=
+ 0.7015193429405162 */
+ y[14] += (y[50]*5747 + 4096) >> 13;
+ /* 7839/16384 ~= (1/Sqrt[2] - Cos[27*Pi/64])/Sin[27*Pi/64] ~=
+ 0.47846561618999817 */
+ y[10] -= (y[54]*7839 + 8192) >> 14;
+ /* 5619/4096 ~= Sqrt[2]*Sin[27*Pi/64] ~= 1.371831354193494 */
+ y[54] += (y[10]*5619 + 2048) >> 12;
+ /* 2473/4096 ~= (1/Sqrt[2] - Cos[27*Pi/64]/2)/Sin[27*Pi/64] ~=
+ 0.603709096285651 */
+ y[10] -= (y[54]*2473 + 2048) >> 12;
+ /* 4641/8192 ~= (1/Sqrt[2] - Cos[29*Pi/64])/Sin[29*Pi/64] ~=
+ 0.5665078993345056 */
+ y[6] -= (y[58]*4641 + 4096) >> 13;
+ /* 2865/2048 ~= Sqrt[2]*Sin[29*Pi/64] ~= 1.3989068359730783 */
+ y[58] += (y[6]*2865 + 1024) >> 11;
+ /* 41/64 ~= (1/Sqrt[2] - Cos[29*Pi/64]/2)/Sin[29*Pi/64] ~=
+ 0.6406758931036793 */
+ y[6] -= (y[58]*41 + 32) >> 6;
+ /* 5397/8192 ~= (Cos[Pi/4] - Cos[31*Pi/64])/Sin[31*Pi/64] ~=
+ 0.6588326996993819 */
+ y[62] += (y[2]*5397 + 4096) >> 13;
+ /* 2893/2048 ~= Sqrt[2]*Sin[31*Pi/64] ~= 1.4125100802019777 */
+ y[2] -= (y[62]*2893 + 1024) >> 11;
+ /* 2799/4096 ~= (1/Sqrt[2] - Cos[31*Pi/64]/2)/Sin[31*Pi/64] ~=
+ 0.6833961245841154 */
+ y[62] += (y[2]*2799 + 2048) >> 12;
+ y[18] -= DAALA_DCT_RSHIFT(y[50], 1);
+ y[50] += y[18];
+ y[14] += DAALA_DCT_RSHIFT(y[46], 1);
+ y[46] = y[14] - y[46];
+ y[10] += DAALA_DCT_RSHIFT(y[42], 1);
+ y[42] -= y[10];
+ y[22] = DAALA_DCT_RSHIFT(y[54], 1) - y[22];
+ y[54] -= y[22];
+ y[6] = DAALA_DCT_RSHIFT(y[26], 1) - y[6];
+ y[26] -= y[6];
+ y[38] -= DAALA_DCT_RSHIFT(y[58], 1);
+ y[58] += y[38];
+ y[34] = DAALA_DCT_RSHIFT(y[2], 1) - y[34];
+ y[2] -= y[34];
+ y[62] += DAALA_DCT_RSHIFT(y[30], 1);
+ y[30] -= y[62];
+ y[22] -= y[26];
+ y[26] += DAALA_DCT_RSHIFT(y[22], 1);
+ y[42] = y[38] - y[42];
+ y[38] -= DAALA_DCT_RSHIFT(y[42], 1);
+ y[34] += y[46];
+ y[46] -= DAALA_DCT_RSHIFT(y[34], 1);
+ y[30] = y[18] - y[30];
+ y[18] -= DAALA_DCT_RSHIFT(y[30], 1);
+ y[62] += y[14];
+ tmp[16] = DAALA_DCT_RSHIFT(y[62], 1);
+ y[14] = tmp[16] - y[14];
+ y[2] += y[50];
+ tmp[13] = DAALA_DCT_RSHIFT(y[2], 1);
+ y[50] -= tmp[13];
+ y[10] = y[6] - y[10];
+ tmp[14] = DAALA_DCT_RSHIFT(y[10], 1);
+ y[6] -= tmp[14];
+ y[54] -= y[58];
+ tmp[15] = DAALA_DCT_RSHIFT(y[54], 1);
+ y[58] += tmp[15];
+ /* 3227/32768 ~= Tan[Pi/32] ~= 0.09849140335716425 */
+ y[34] -= (y[30]*3227 + 16384) >> 15;
+ /* 6393/32768 ~= Sin[Pi/16] ~= 0.19509032201612825 */
+ y[30] += (y[34]*6393 + 16384) >> 15;
+ /* 3227/32768 ~= Tan[Pi/32] ~= 0.09849140335716425 */
+ y[34] -= (y[30]*3227 + 16384) >> 15;
+ /* 2485/8192 ~= Tan[3*Pi/32] ~= 0.303346683607342 */
+ y[38] -= (y[26]*2485 + 4096) >> 13;
+ /* 18205/32768 ~= Sin[3*Pi/16] ~= 0.555570233019602 */
+ y[26] += (y[38]*18205 + 16384) >> 15;
+ /* 2485/8192 ~= Tan[3*Pi/32] ~= 0.303346683607342 */
+ y[38] -= (y[26]*2485 + 4096) >> 13;
+ /* 8757/16384 ~= Tan[5*Pi/32] ~= 0.534511135950792 */
+ y[22] -= (y[42]*8757 + 8192) >> 14;
+ /* 6811/8192 ~= Sin[5*Pi/16] ~= 0.831469612302545 */
+ y[42] += (y[22]*6811 + 4096) >> 13;
+    /* 8757/16384 ~= Tan[5*Pi/32] ~= 0.534511135950792 */
+ y[22] -= (y[42]*8757 + 8192) >> 14;
+    /* 6723/8192 ~= Tan[7*Pi/32] ~= 0.820678790828660 */
+ y[46] -= (y[18]*6723 + 4096) >> 13;
+ /* 16069/16384 ~= Sin[7*Pi/16] ~= 0.980785280403230 */
+ y[18] += (y[46]*16069 + 8192) >> 14;
+ /* 6723/8192 ~= Tan[7*Pi/32] ~= 0.820678790828660 */
+ y[46] -= (y[18]*6723 + 4096) >> 13;
+ y[26] += DAALA_DCT_RSHIFT(y[34], 1);
+ y[34] = y[26] - y[34];
+ y[38] += DAALA_DCT_RSHIFT(y[30], 1);
+ y[30] -= y[38];
+ y[18] = DAALA_DCT_RSHIFT(y[42], 1) - y[18];
+ y[42] -= y[18];
+ y[46] += DAALA_DCT_RSHIFT(y[22], 1);
+ y[22] = y[46] - y[22];
+ y[14] -= tmp[14];
+ y[10] += y[14];
+ y[50] += tmp[15];
+ y[54] = y[50] - y[54];
+ y[58] = tmp[16] - y[58];
+ y[62] -= y[58];
+ y[6] = tmp[13] - y[6];
+ y[2] -= y[6];
+ /* 13573/32768 ~= Tan[pi/8] ~= 0.414213562373095 */
+ y[38] -= (y[26]*13573 + 16384) >> 15;
+ /* 11585/16384 ~= Sin[pi/4] ~= 0.707106781186547 */
+ y[26] += (y[38]*11585 + 8192) >> 14;
+ /* 13573/32768 ~= Tan[pi/8] ~= 0.414213562373095 */
+ y[38] -= (y[26]*13573 + 16384) >> 15;
+ /* 13573/32768 ~= Tan[pi/8] ~= 0.414213562373095 */
+ y[22] -= (y[42]*13573 + 16384) >> 15;
+ /* 11585/16384 ~= Sin[pi/4] ~= 0.707106781186547 */
+ y[42] += (y[22]*11585 + 8192) >> 14;
+ /* 13573/32768 ~= Tan[pi/8] ~= 0.414213562373095 */
+ y[22] -= (y[42]*13573 + 16384) >> 15;
+ /* 3259/16384 ~= Tan[Pi/16] ~= 0.198912367379658 */
+ y[14] -= (y[50]*3259 + 8192) >> 14;
+ /* 3135/8192 ~= Sin[Pi/8] ~= 0.382683432365090 */
+ y[50] += (y[14]*3135 + 4096) >> 13;
+ /* 3259/16384 ~= Tan[Pi/16] ~= 0.198912367379658 */
+ y[14] -= (y[50]*3259 + 8192) >> 14;
+ /* 21895/32768 ~= Tan[3*Pi/16] ~= 0.668178637919299 */
+ y[10] -= (y[54]*21895 + 16384) >> 15;
+ /* 15137/16384 ~= Sin[3*Pi/8] ~= 0.923879532511287 */
+ y[54] += (y[10]*15137 + 8192) >> 14;
+ /* 21895/32768 ~= Tan[3*Pi/16] ~= 0.668178637919299 */
+ y[10] -= (y[54]*21895 + 16384) >> 15;
+ /* 13573/32768 ~= Tan[pi/8] ~= 0.414213562373095 */
+ y[58] -= (y[6]*13573 + 16384) >> 15;
+ /* 11585/16384 ~= Sin[pi/4] ~= 0.707106781186547 */
+ y[6] += (y[58]*11585 + 8192) >> 14;
+ /* 13573/32768 ~= Tan[pi/8] ~= 0.414213562373095 */
+ y[58] -= (y[6]*13573 + 16384) >> 15;
+ /* 3393/8192 ~= Tan[Pi/8] ~= 0.414213562373095 */
+ y[36] -= (y[28]*3393 + 4096) >> 13;
+ /* 5793/8192 ~= Sin[Pi/4] ~= 0.707106781186547 */
+ y[28] += (y[36]*5793 + 4096) >> 13;
+ /* 3393/8192 ~= Tan[Pi/8] ~= 0.414213562373095 */
+ y[36] -= (y[28]*3393 + 4096) >> 13;
+ /* 2737/4096 ~= Tan[3*Pi/16] ~= 0.668178637919299 */
+ y[20] -= (y[44]*2737 + 2048) >> 12;
+ /* 473/512 ~= Sin[3*Pi/8] ~= 0.923879532511287 */
+ y[44] += (y[20]*473 + 256) >> 9;
+ /* 2737/4096 ~= Tan[3*Pi/16] ~= 0.668178637919299 */
+ y[20] -= (y[44]*2737 + 2048) >> 12;
+ /* 3259/16384 ~= Tan[Pi/16] ~= 0.198912367379658 */
+ y[52] -= (y[12]*3259 + 8192) >> 14;
+ /* 3135/8192 ~= Sin[Pi/8] ~= 0.382683432365090 */
+ y[12] += (y[52]*3135 + 4096) >> 13;
+ /* 3259/16384 ~= Tan[Pi/16] ~= 0.198912367379658 */
+ y[52] -= (y[12]*3259 + 8192) >> 14;
+ y[60] -= y[36];
+ tmp[7] = DAALA_DCT_RSHIFT(y[60], 1);
+ y[36] += tmp[7];
+ y[44] = y[12] - y[44];
+ tmp[6] = DAALA_DCT_RSHIFT(y[44], 1);
+ y[12] -= tmp[6];
+ y[20] = y[52] - y[20];
+ tmp[5] = DAALA_DCT_RSHIFT(y[20], 1);
+ y[52] -= tmp[5];
+ y[4] += y[28];
+ tmp[4] = DAALA_DCT_RSHIFT(y[4], 1);
+ y[28] = tmp[4] - y[28];
+ y[12] = tmp[4] - y[12];
+ y[4] -= y[12];
+ y[28] -= tmp[5];
+ y[20] += y[28];
+ y[36] -= tmp[6];
+ y[44] += y[36];
+ y[52] += tmp[7];
+ y[60] -= y[52];
+ /* 20055/32768 ~= (1/Sqrt[2] - Sin[Pi/32])/Cos[Pi/32] ~=
+ 0.612036765167935 */
+ y[4] -= (y[60]*20055 + 16384) >> 15;
+ /* 11529/8192 ~= Sqrt[2]*Cos[Pi/32] ~= 1.40740373752638 */
+ y[60] += (y[4]*11529 + 4096) >> 13;
+ /* 5417/8192 ~= (Sqrt[2] - Sin[Pi/32])/(2*Cos[Pi/32]) ~=
+ 0.661282466846517 */
+ y[4] -= (y[60]*5417 + 4096) >> 13;
+ /* 3525/4096 ~= (Cos[3*Pi/32] - 1/Sqrt[2])/Sin[3*Pi/32] ~=
+ 0.860650162139486 */
+ y[28] += (y[36]*3525 + 2048) >> 12;
+ /* 3363/8192 ~= Sqrt[2]*Sin[3*Pi/32] ~= 0.410524527522357 */
+ y[36] += (y[28]*3363 + 4096) >> 13;
+    /* 12905/16384 ~= (1/Sqrt[2] - Cos[3*Pi/32]/2)/Sin[3*Pi/32] ~=
+ 0.787628942329675 */
+ y[28] -= (y[36]*12905 + 8192) >> 14;
+ /* 4379/16384 ~= (1/Sqrt[2] - Sin[5*Pi/32])/Cos[5*Pi/32] ~=
+ 0.267268807193026 */
+ y[20] -= (y[44]*4379 + 8192) >> 14;
+ /* 10217/8192 ~= Sqrt[2]*Cos[5*Pi/32] ~= 1.24722501298667 */
+ y[44] += (y[20]*10217 + 4096) >> 13;
+ /* 4379/8192 ~= (Sqrt[2] - Sin[5*Pi/32])/(2*Cos[5*Pi/32]) ~=
+ 0.534524375168421 */
+ y[20] -= (y[44]*4379 + 4096) >> 13;
+ /* 851/8192 ~= (Cos[7*Pi/32] - 1/Sqrt[2])/Sin[7*Pi/32] ~=
+ 0.103884567856159 */
+ y[12] += (y[52]*851 + 4096) >> 13;
+ /* 14699/16384 ~= Sqrt[2]*Sin[7*Pi/32] ~= 0.897167586342636 */
+ y[52] += (y[12]*14699 + 8192) >> 14;
+ /* 1035/2048 ~= (Sqrt[2] - Cos[7*Pi/32])/(2*Sin[7*Pi/32]) ~=
+ 0.505367194937830 */
+ y[12] -= (y[52]*1035 + 1024) >> 11;
+ /* 8553/16384 ~= (1/Sqrt[2] - Cos[7*Pi/16])/Sin[7*Pi/16] ~=
+ 0.52204745462729 */
+ y[8] -= (y[56]*8553 + 8192) >> 14;
+ /* 5681/4096 ~= Sqrt[2]*Sin[7*Pi/16] ~= 1.38703984532215 */
+ y[56] += (y[8]*5681 + 2048) >> 12;
+ /* 5091/8192 ~= (1/Sqrt[2] - Cos[7*Pi/16]/2)/Sin[7*Pi/16] ~=
+ 0.6215036383171189 */
+ y[8] -= (y[56]*5091 + 4096) >> 13;
+ /* 7335/32768 ~= (1/Sqrt[2] - Cos[3*Pi/16])/Sin[3*Pi/16] ~=
+ 0.223847182092655 */
+ y[24] -= (y[40]*7335 + 16384) >> 15;
+ /* 1609/2048 ~= Sqrt[2]*Sin[3*Pi/16] ~= 0.785694958387102 */
+ y[40] -= (y[24]*1609 + 1024) >> 11;
+ /* 537/1024 ~= (1/Sqrt[2] - Cos[3*Pi/16]/2)/Sin[3*Pi/16] ~=
+ 0.524455699240090 */
+ y[24] += (y[40]*537 + 512) >> 10;
+ tmp[2] = DAALA_DCT_RSHIFT(y[40], 1);
+ y[8] += tmp[2];
+ y[40] -= y[8];
+ tmp[3] = DAALA_DCT_RSHIFT(y[56], 1);
+ y[24] = tmp[3] - y[24];
+ y[56] -= y[24];
+ /* 3393/8192 ~= Tan[Pi/8] ~= 0.414213562373095 */
+ y[24] -= (y[40]*3393 + 4096) >> 13;
+ /* 5793/8192 ~= Sin[Pi/4] ~= 0.707106781186547 */
+ y[40] += (y[24]*5793 + 4096) >> 13;
+ /* 13573/32768 ~= Tan[Pi/8] ~= 0.414213562373095 */
+ y[24] -= (y[40]*13573 + 16384) >> 15;
+ /* 4573/4096 ~= 4*Sin[Pi/8] - Tan[Pi/8] ~= 1.11652016708726 */
+ y[48] += (y[16]*4573 + 2048) >> 12;
+ /* 669/1024 ~= Cos[Pi/8]/Sqrt[2] ~= 0.653281482438188 */
+ y[16] -= (y[48]*669 + 512) >> 10;
+ /* 11507/16384 ~= 4*Sin[Pi/8] - 2*Tan[Pi/8] ~= 0.702306604714169 */
+ y[48] += (y[16]*11507 + 8192) >> 14;
+ y[32] = y[0] - y[32];
+ tmp[1] = DAALA_DCT_RSHIFT(y[32], 1);
+ y[0] -= tmp[1];
+ tmp[0] = DAALA_DCT_RSHIFT(y[48], 1);
+ y[0] += tmp[0];
+ y[48] = y[0] - y[48];
+ y[16] = tmp[1] - y[16];
+ y[32] -= y[16];
+ y[56] = y[0] - y[56];
+ tmp[9] = DAALA_DCT_RSHIFT(y[56], 1);
+ y[0] -= tmp[9];
+ y[32] += y[24];
+ tmp[12] = DAALA_DCT_RSHIFT(y[32], 1);
+ y[24] = tmp[12] - y[24];
+ y[40] = y[16] - y[40];
+ tmp[10] = DAALA_DCT_RSHIFT(y[40], 1);
+ y[16] -= tmp[10];
+ y[48] += y[8];
+ tmp[11] = DAALA_DCT_RSHIFT(y[48], 1);
+ y[8] = tmp[11] - y[8];
+ tmp[8] = DAALA_DCT_RSHIFT(y[60], 1);
+ y[0] += tmp[8];
+ y[60] = y[0] - y[60];
+ y[28] = tmp[12] - y[28];
+ y[32] -= y[28];
+ y[16] += DAALA_DCT_RSHIFT(y[44], 1);
+ y[44] = y[16] - y[44];
+ y[12] = tmp[11] - y[12];
+ y[48] -= y[12];
+ y[8] += DAALA_DCT_RSHIFT(y[52], 1);
+ y[52] = y[8] - y[52];
+ y[20] = tmp[10] - y[20];
+ y[40] -= y[20];
+ y[24] += DAALA_DCT_RSHIFT(y[36], 1);
+ y[36] = y[24] - y[36];
+ y[4] = tmp[9] - y[4];
+ y[56] -= y[4];
+ y[62] = y[0] - y[62];
+ tmp[41] = DAALA_DCT_RSHIFT(y[62], 1);
+ y[0] -= tmp[41];
+ y[32] += y[30];
+ tmp[56] = DAALA_DCT_RSHIFT(y[32], 1);
+ y[30] = tmp[56] - y[30];
+ y[46] = y[16] - y[46];
+ tmp[42] = DAALA_DCT_RSHIFT(y[46], 1);
+ y[16] -= tmp[42];
+ y[48] += y[14];
+ tmp[55] = DAALA_DCT_RSHIFT(y[48], 1);
+ y[14] = tmp[55] - y[14];
+ y[54] = y[8] - y[54];
+ tmp[43] = DAALA_DCT_RSHIFT(y[54], 1);
+ y[8] -= tmp[43];
+ y[40] += y[22];
+ tmp[54] = DAALA_DCT_RSHIFT(y[40], 1);
+ y[22] = tmp[54] - y[22];
+ y[38] = y[24] - y[38];
+ tmp[44] = DAALA_DCT_RSHIFT(y[38], 1);
+ y[24] -= tmp[44];
+ y[56] += y[6];
+ tmp[53] = DAALA_DCT_RSHIFT(y[56], 1);
+ y[6] = tmp[53] - y[6];
+ y[58] = y[4] - y[58];
+ tmp[45] = DAALA_DCT_RSHIFT(y[58], 1);
+ y[4] -= tmp[45];
+ y[36] += y[26];
+ tmp[52] = DAALA_DCT_RSHIFT(y[36], 1);
+ y[26] = tmp[52] - y[26];
+ y[42] = y[20] - y[42];
+ tmp[46] = DAALA_DCT_RSHIFT(y[42], 1);
+ y[20] -= tmp[46];
+ y[52] += y[10];
+ tmp[51] = DAALA_DCT_RSHIFT(y[52], 1);
+ y[10] = tmp[51] - y[10];
+ y[50] = y[12] - y[50];
+ tmp[47] = DAALA_DCT_RSHIFT(y[50], 1);
+ y[12] -= tmp[47];
+ y[44] += y[18];
+ tmp[50] = DAALA_DCT_RSHIFT(y[44], 1);
+ y[18] = tmp[50] - y[18];
+ y[34] = y[28] - y[34];
+ tmp[48] = DAALA_DCT_RSHIFT(y[34], 1);
+ y[28] -= tmp[48];
+ y[60] += y[2];
+ tmp[49] = DAALA_DCT_RSHIFT(y[60], 1);
+ y[2] = tmp[49] - y[2];
+ tmp[25] = DAALA_DCT_RSHIFT(y[63], 1);
+ y[0] += tmp[25];
+ y[63] = y[0] - y[63];
+ y[31] = tmp[56] - y[31];
+ y[32] -= y[31];
+ tmp[26] = DAALA_DCT_RSHIFT(y[47], 1);
+ y[16] += tmp[26];
+ y[47] = y[16] - y[47];
+ y[15] = tmp[55] - y[15];
+ y[48] -= y[15];
+ tmp[27] = DAALA_DCT_RSHIFT(y[55], 1);
+ y[8] += tmp[27];
+ y[55] = y[8] - y[55];
+ y[23] = tmp[54] - y[23];
+ y[40] -= y[23];
+ tmp[28] = DAALA_DCT_RSHIFT(y[39], 1);
+ y[24] += tmp[28];
+ y[39] = y[24] - y[39];
+ y[7] = tmp[53] - y[7];
+ y[56] -= y[7];
+ tmp[29] = DAALA_DCT_RSHIFT(y[59], 1);
+ y[4] += tmp[29];
+ y[59] = y[4] - y[59];
+ y[27] = tmp[52] - y[27];
+ y[36] -= y[27];
+ tmp[30] = DAALA_DCT_RSHIFT(y[43], 1);
+ y[20] += tmp[30];
+ y[43] = y[20] - y[43];
+ y[11] = tmp[51] - y[11];
+ y[52] -= y[11];
+ tmp[31] = DAALA_DCT_RSHIFT(y[51], 1);
+ y[12] += tmp[31];
+ y[51] = y[12] - y[51];
+ y[19] = tmp[50] - y[19];
+ y[44] -= y[19];
+ tmp[32] = DAALA_DCT_RSHIFT(y[35], 1);
+ y[28] += tmp[32];
+ y[35] = y[28] - y[35];
+ y[3] = tmp[49] - y[3];
+ y[60] -= y[3];
+ tmp[33] = DAALA_DCT_RSHIFT(y[61], 1);
+ y[2] += tmp[33];
+ y[61] = y[2] - y[61];
+ y[29] = tmp[48] - y[29];
+ y[34] -= y[29];
+ tmp[34] = DAALA_DCT_RSHIFT(y[45], 1);
+ y[18] += tmp[34];
+ y[45] = y[18] - y[45];
+ y[13] = tmp[47] - y[13];
+ y[50] -= y[13];
+ tmp[35] = DAALA_DCT_RSHIFT(y[53], 1);
+ y[10] += tmp[35];
+ y[53] = y[10] - y[53];
+ y[21] = tmp[46] - y[21];
+ y[42] -= y[21];
+ tmp[36] = DAALA_DCT_RSHIFT(y[37], 1);
+ y[26] += tmp[36];
+ y[37] = y[26] - y[37];
+ y[5] = tmp[45] - y[5];
+ y[58] -= y[5];
+ tmp[37] = DAALA_DCT_RSHIFT(y[57], 1);
+ y[6] += tmp[37];
+ y[57] = y[6] - y[57];
+ y[25] = tmp[44] - y[25];
+ y[38] -= y[25];
+ tmp[38] = DAALA_DCT_RSHIFT(y[41], 1);
+ y[22] += tmp[38];
+ y[41] = y[22] - y[41];
+ y[9] = tmp[43] - y[9];
+ y[54] -= y[9];
+ tmp[39] = DAALA_DCT_RSHIFT(y[49], 1);
+ y[14] += tmp[39];
+ y[49] = y[14] - y[49];
+ y[17] = tmp[42] - y[17];
+ y[46] -= y[17];
+ tmp[40] = DAALA_DCT_RSHIFT(y[33], 1);
+ y[30] += tmp[40];
+ y[33] = y[30] - y[33];
+ y[1] = tmp[41] - y[1];
+ y[62] -= y[1];
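+    /* The transform leaves its outputs in bit-reversed index order, so
+     * destination row i receives y[bit-reverse of i over 6 bits]. */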
+ WRITE(x + 0*xstride, y[0]);
+ WRITE(x + 1*xstride, y[32]);
+ WRITE(x + 2*xstride, y[16]);
+ WRITE(x + 3*xstride, y[48]);
+ WRITE(x + 4*xstride, y[8]);
+ WRITE(x + 5*xstride, y[40]);
+ WRITE(x + 6*xstride, y[24]);
+ WRITE(x + 7*xstride, y[56]);
+ WRITE(x + 8*xstride, y[4]);
+ WRITE(x + 9*xstride, y[36]);
+ WRITE(x + 10*xstride, y[20]);
+ WRITE(x + 11*xstride, y[52]);
+ WRITE(x + 12*xstride, y[12]);
+ WRITE(x + 13*xstride, y[44]);
+ WRITE(x + 14*xstride, y[28]);
+ WRITE(x + 15*xstride, y[60]);
+ WRITE(x + 16*xstride, y[2]);
+ WRITE(x + 17*xstride, y[34]);
+ WRITE(x + 18*xstride, y[18]);
+ WRITE(x + 19*xstride, y[50]);
+ WRITE(x + 20*xstride, y[10]);
+ WRITE(x + 21*xstride, y[42]);
+ WRITE(x + 22*xstride, y[26]);
+ WRITE(x + 23*xstride, y[58]);
+ WRITE(x + 24*xstride, y[6]);
+ WRITE(x + 25*xstride, y[38]);
+ WRITE(x + 26*xstride, y[22]);
+ WRITE(x + 27*xstride, y[54]);
+ WRITE(x + 28*xstride, y[14]);
+ WRITE(x + 29*xstride, y[46]);
+ WRITE(x + 30*xstride, y[30]);
+ WRITE(x + 31*xstride, y[62]);
+ WRITE(x + 32*xstride, y[1]);
+ WRITE(x + 33*xstride, y[33]);
+ WRITE(x + 34*xstride, y[17]);
+ WRITE(x + 35*xstride, y[49]);
+ WRITE(x + 36*xstride, y[9]);
+ WRITE(x + 37*xstride, y[41]);
+ WRITE(x + 38*xstride, y[25]);
+ WRITE(x + 39*xstride, y[57]);
+ WRITE(x + 40*xstride, y[5]);
+ WRITE(x + 41*xstride, y[37]);
+ WRITE(x + 42*xstride, y[21]);
+ WRITE(x + 43*xstride, y[53]);
+ WRITE(x + 44*xstride, y[13]);
+ WRITE(x + 45*xstride, y[45]);
+ WRITE(x + 46*xstride, y[29]);
+ WRITE(x + 47*xstride, y[61]);
+ WRITE(x + 48*xstride, y[3]);
+ WRITE(x + 49*xstride, y[35]);
+ WRITE(x + 50*xstride, y[19]);
+ WRITE(x + 51*xstride, y[51]);
+ WRITE(x + 52*xstride, y[11]);
+ WRITE(x + 53*xstride, y[43]);
+ WRITE(x + 54*xstride, y[27]);
+ WRITE(x + 55*xstride, y[59]);
+ WRITE(x + 56*xstride, y[7]);
+ WRITE(x + 57*xstride, y[39]);
+ WRITE(x + 58*xstride, y[23]);
+ WRITE(x + 59*xstride, y[55]);
+ WRITE(x + 60*xstride, y[15]);
+ WRITE(x + 61*xstride, y[47]);
+ WRITE(x + 62*xstride, y[31]);
+ WRITE(x + 63*xstride, y[63]);
+}
+
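+/* The 2-D inverse transform is computed separably: the 1-D IDCT of each
+ * source row is written transposed into a temporary block, and a second
+ * 1-D pass over the temporary's rows then fills the destination columns. */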
+#define IDCT_DECL(BSIZE) \
+static void RENAME(idct_2D_##BSIZE)(uint8_t *dst, int dstride, \
+ const uint8_t *_src, int istride) \
+{ \
+ int i, j; \
+ pixel coef[BSIZE], tmp[BSIZE*BSIZE]; \
+ pixel *dstr = (pixel *)dst; \
+ const pixel *src = (pixel *)_src; \
+ for (i = 0; i < BSIZE; i++) { \
+ for (j = 0; j < BSIZE; j++) \
+ coef[j] = READ(src + i*istride + j); \
+ idct_1D_##BSIZE(tmp + i, BSIZE, coef); \
+ } \
+ for (i = 0; i < BSIZE; i++) \
+ idct_1D_##BSIZE(dstr + i, dstride, tmp + BSIZE*i); \
+}
+
+IDCT_DECL(4)
+IDCT_DECL(8)
+IDCT_DECL(16)
+IDCT_DECL(32)
+IDCT_DECL(64)
+
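+/* 4-point pre/post filter coefficients in Q6: FILTER_PARAM_4_0/_1 are the
+ * scaling gains, FILTER_PARAM_4_2/_3 the lifting steps (see prefilter_4x4). */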
+#define FILTER_PARAM_4_3 (33)
+#define FILTER_PARAM_4_2 (-15)
+#define FILTER_PARAM_4_1 (75)
+#define FILTER_PARAM_4_0 (85)
+
+static av_always_inline void prefilter_4x4(pixel *dst, const pixel *src)
+{
+ pixel t[4];
+ t[3] = src[0]-src[3];
+ t[2] = src[1]-src[2];
+ t[1] = src[1]-(t[2]>>1);
+ t[0] = src[0]-(t[3]>>1);
+    #if FILTER_PARAM_4_0 != 64
+    t[2] = t[2]*FILTER_PARAM_4_0>>6;
+    t[2] += -t[2]>>(32-1)&1;
+    #endif
+    #if FILTER_PARAM_4_1 != 64
+    t[3] = t[3]*FILTER_PARAM_4_1>>6;
+    t[3] += -t[3]>>(32-1)&1;
+    #endif
+ t[3] += (t[2]*FILTER_PARAM_4_2+32)>>6;
+ t[2] += (t[3]*FILTER_PARAM_4_3+32)>>6;
+ t[0] += t[3]>>1;
+ dst[0] = (pixel)t[0];
+ t[1] += t[2]>>1;
+ dst[1] = t[1];
+ dst[2] = t[1] - t[2];
+ dst[3] = t[0] - t[3];
+}
+
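+/* Inverse of prefilter_4x4: the lifting steps are undone in reverse order,
+ * with the forward scalings by FILTER_PARAM_4_0/_1 replaced by divisions. */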
+static av_always_inline void postfilter_4x4(pixel *dst, const pixel *src)
+{
+ pixel t[4];
+ t[3] = src[0]-src[3];
+ t[2] = src[1]-src[2];
+ t[1] = src[1]-(t[2]>>1);
+ t[0] = src[0]-(t[3]>>1);
+ t[2] -= (t[3]*FILTER_PARAM_4_3+32)>>6;
+ t[3] -= (t[2]*FILTER_PARAM_4_2+32)>>6;
+ #if FILTER_PARAM_4_1 != 64
+ t[3] = t[3]*(1 << 6)/FILTER_PARAM_4_1;
+ #endif
+ #if FILTER_PARAM_4_0 != 64
+ t[2] = t[2]*(1 << 6)/FILTER_PARAM_4_0;
+ #endif
+ t[0] += t[3]>>1;
+ dst[0] = t[0];
+ t[1] += t[2]>>1;
+ dst[1] = t[1];
+ dst[2] = t[1] - t[2];
+ dst[3] = t[0] - t[3];
+}
+
+static void RENAME(daala_split_prefilter)(uint8_t *_blk, int stride, int bs,
+ int hfilter, int vfilter)
+{
+ int i, j;
+ const int f = 0;
+ pixel *c;
+ pixel *blk = (pixel *)_blk;
+ if (hfilter) {
+ c = blk + ((2 << bs) - (2 << f))*stride;
+ for (j = 0; j < 4 << bs; j++) {
+ int k;
+ pixel t[4 << DAALA_NBSIZES];
+ for (k = 0; k < 4 << f; k++) t[k] = c[stride*k + j];
+ prefilter_4x4(t, t);
+ for (k = 0; k < 4 << f; k++) c[stride*k + j] = t[k];
+ }
+ }
+ if (vfilter) {
+ c = blk + (2 << bs) - (2 << f);
+ for (i = 0; i < 4 << bs; i++) {
+ prefilter_4x4(c + i*stride, c + i*stride);
+ }
+ }
+}
+
+
+static void RENAME(daala_split_postfilter)(uint8_t *_blk, int stride, int bs,
+ int hfilter, int vfilter)
+{
+ int i, j;
+ const int f = 0;
+ pixel *c;
+ pixel *blk = (pixel *)_blk;
+ if (vfilter) {
+ c = blk + (2 << bs) - (2 << f);
+ for (i = 0; i < 4 << bs; i++) {
+ postfilter_4x4(c + i*stride, c + i*stride);
+ }
+ }
+ if (hfilter) {
+ c = blk + ((2 << bs) - (2 << f))*stride;
+ for (j = 0; j < 4 << bs; j++) {
+ int k;
+ pixel t[4 << DAALA_NBSIZES];
+ for (k = 0; k < 4 << f; k++) t[k] = c[stride*k + j];
+ postfilter_4x4(t, t);
+ for (k = 0; k < 4 << f; k++) c[stride*k + j] = t[k];
+ }
+ }
+}
+
+static void RENAME(daala_frame_postfilter)(uint8_t *_blk, int stride,
+ int sbn_x, int sbn_y,
+ int xdec, int ydec)
+{
+ int i, j, f;
+ pixel *c;
+ pixel *blk = (pixel *)_blk;
+ f = 0;
+ c = blk + (DAALA_BSIZE_MAX >> ydec) - (2 << f);
+ for (i = 1; i < sbn_x; i++) {
+ for (j = 0; j < sbn_y << DAALA_LOG_BSIZE_MAX >> ydec; j++) {
+ postfilter_4x4(c + j*stride, c + j*stride);
+ }
+ c += DAALA_BSIZE_MAX >> xdec;
+ }
+ c = blk + ((DAALA_BSIZE_MAX >> ydec) - (2 << f))*stride;
+ for (i = 1; i < sbn_y; i++) {
+ for (j = 0; j < sbn_x << DAALA_LOG_BSIZE_MAX >> xdec; j++) {
+ int k;
+ pixel t[4 << DAALA_NBSIZES];
+ for (k = 0; k < 4 << f; k++) t[k] = c[stride*k + j];
+ postfilter_4x4(t, t);
+ for (k = 0; k < 4 << f; k++) c[stride*k + j] = t[k];
+ }
+ c += DAALA_BSIZE_MAX*stride >> ydec;
+ }
+}
+
+/* Increase horizontal frequency resolution of a block and return the LF */
+static av_always_inline void daala_sf_hor_up(pixel *dst, int dstride,
+ const pixel *src, int istride,
+ int dx, int n)
+{
+ int x, y;
+ for (y = 0; y < n; y++) {
+ for (x = 0; x < n >> 1; x++) {
+ const int hswap = x & 1;
+ pixel ll = src[y*istride + x];
+ pixel lh = src[y*istride + x + dx];
+ lh = ll - lh;
+ ll -= DAALA_DCT_RSHIFT(lh, 1);
+ dst[y*dstride + 2*x + hswap] = ll;
+ dst[y*dstride + 2*x + 1 - hswap] = lh;
+ }
+ }
+}
+
+/* Increase vertical frequency resolution of a block and return the LF */
+static av_always_inline void daala_sf_ver_up(pixel *dst, int dstride,
+ const pixel *src, int istride,
+ int dy, int n)
+{
+ int x, y;
+ for (y = 0; y < n >> 1; y++) {
+ const int vswap = y & 1;
+ for (x = 0; x < n; x++) {
+ pixel ll = src[y*istride + x];
+ pixel hl = src[(y + dy)*istride + x];
+ hl = ll - hl;
+ ll -= DAALA_DCT_RSHIFT(hl, 1);
+ dst[(2*y + vswap)*dstride + x] = ll;
+ dst[(2*y + 1 - vswap)*dstride + x] = hl;
+ }
+ }
+}
+
+/* Increase vertical and horizontal resolution of a block and return the LF */
+static av_always_inline void daala_sf_ful_up(pixel *dst, int dstride,
+ const pixel *src, int istride,
+ int dx, int dy, int n)
+{
+ int x, y;
+ for (y = 0; y < n >> 1; y++) {
+ const int vswap = y & 1;
+ for (x = 0; x < n >> 1; x++) {
+ const int hswap = x & 1;
+ pixel ll = src[y*istride + x];
+ pixel lh = src[y*istride + x + dx];
+ pixel hl = src[(y + dy)*istride + x];
+ pixel hh = src[(y + dy)*istride + x + dx];
+ daala_haar_kern(&ll, &hl, &lh, &hh);
+ dst[(2*y + vswap)*dstride + 2*x + hswap] = ll;
+ dst[(2*y + vswap)*dstride + 2*x + 1 - hswap] = lh;
+ dst[(2*y + 1 - vswap)*dstride + 2*x + hswap] = hl;
+ dst[(2*y + 1 - vswap)*dstride + 2*x + 1 - hswap] = hh;
+ }
+ }
+}
+
+/* Chroma from luma */
+static void RENAME(daala_cfl_resample)(uint8_t *_dst, int dstride,
+ const uint8_t *_src, int istride,
+ int xdec, int ydec, int bs, int chroma_bs)
+{
+ int i, j;
+ const int n = 4 << bs;
+ pixel *dst = (pixel *)_dst;
+    const pixel *src = (const pixel *)_src;
+ if (!chroma_bs && (xdec || ydec)) {
+ if (xdec) {
+ if (ydec) {
+ daala_sf_ful_up(dst, dstride, src, istride, n, n, n);
+ for (i = 0; i < 4; i++) {
+ for (j = 0; j < 4; j++) {
+ const uint8_t scale = ff_daaladsp_cfl_scale[j][i];
+ dst[i*dstride + j] = (scale*dst[i*dstride + j] + 64) >> 7;
+ }
+ }
+ } else {
+ daala_sf_hor_up(dst, dstride, src, istride, n, n);
+ }
+ } else {
+ daala_sf_ver_up(dst, dstride, src, istride, n, n);
+ }
+ } else {
+ for (i = 0; i < n; i++) {
+ for (j = 0; j < n; j++) {
+ dst[i*dstride + j] = src[i*istride + j];
+ }
+ }
+ }
+}
+
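+/* Copies the first row/column of the block from the top/left neighbour of
+ * equal block size; for the three lowest-index entries only the neighbour
+ * with the larger summed magnitude (g1 vs g2) is used. */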
+static void RENAME(daala_intra_prediction)(uint8_t *_pred, const uint8_t *_ref,
+ int stride, int x, int y,
+ enum DaalaBsize *bsize, int bstride,
+ enum DaalaBsize bs)
+{
+ int i;
+ int64_t g1 = 0, g2 = 0;
+ const int n = 1 << (bs + DAALA_LOG_BSIZE0);
+ const int t = y && DAALA_BSIZE4x4(bsize, bstride, x, y - 1) == bs;
+ const int l = x && DAALA_BSIZE4x4(bsize, bstride, x - 1, y) == bs;
+ pixel *pred = (pixel *)_pred;
+ const pixel *ref = (const pixel *)_ref;
+ if (t) {
+ for (i = 1; i < 4; i++)
+ g1 += abs(ref[-n*stride + i]);
+ }
+ if (l) {
+ for (i = 1; i < 4; i++)
+ g2 += abs(ref[-n + i*stride]);
+ }
+ if (t) {
+ for (i = 4; i < n; i++)
+ pred[i] = ref[-n*stride + i];
+ }
+ if (l) {
+ for (i = 4; i < n; i++)
+ pred[i*n] = ref[-n + i*stride];
+ }
+ if (g1 > g2) {
+ if (t)
+ for (i = 1; i < 4; i++)
+ pred[i] = ref[-n*stride + i];
+ } else if (l) {
+ for (i = 1; i < 4; i++)
+ pred[i*n] = ref[-n + i*stride];
+ }
+}
+
+static void RENAME(daaladsp_init)(DaalaDSP *d)
+{
+ /* Prediction */
+ d->cfl = RENAME(daala_cfl_resample );
+ d->intrapred = RENAME(daala_intra_prediction);
+
+ /* Filters */
+ d->pre_split_filter = RENAME(daala_split_prefilter );
+ d->post_split_filter = RENAME(daala_split_postfilter);
+ d->frame_postfilter = RENAME(daala_frame_postfilter);
+
+ /* Transforms */
+ d->idwt = RENAME(daala_haar_iwt);
+ d->idct[DAALA_BLOCKSIZE_4x4] = RENAME(idct_2D_4 );
+ d->idct[DAALA_BLOCKSIZE_8x8] = RENAME(idct_2D_8 );
+ d->idct[DAALA_BLOCKSIZE_16x16] = RENAME(idct_2D_16 );
+ d->idct[DAALA_BLOCKSIZE_32x32] = RENAME(idct_2D_32 );
+ d->idct[DAALA_BLOCKSIZE_64x64] = RENAME(idct_2D_64 );
+}
+
+#undef RENAME
+#undef WRITE
+#undef READ
+#undef pixel
+
+av_cold int ff_daaladsp_init(DaalaDSP *d, int bit_depth)
+{
+    switch (bit_depth) {
+    case 8:
+        daaladsp_init_8bit(d);
+        break;
+    case 10:
+    case 12:
+    default:
+        return 1;
+ }
+ return 0;
+}
diff --git a/libavcodec/daaladsp.h b/libavcodec/daaladsp.h
new file mode 100644
index 0000000..a98e3cd
--- /dev/null
+++ b/libavcodec/daaladsp.h
@@ -0,0 +1,106 @@
+/*
+ * Daala video decoder
+ *
+ * Copyright (C) 2015-2016 Rostislav Pehlivanov <atomnuker at gmail.com>
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * This file incorporates work covered by the following copyright and
+ * permission notice:
+ *
+ * Copyright 2001-2015 Xiph.Org and contributors.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+ * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef AVCODEC_DAALADSP_H
+#define AVCODEC_DAALADSP_H
+
+#include "daala.h"
+
+/* Haar transform kernel (external); daaladsp.c manually inlines it internally */
+static av_always_inline void daala_haar_kern(dctcoef *a, dctcoef *b,
+ dctcoef *c, dctcoef *d)
+{
+ dctcoef tmp;
+ *a += *c;
+ *d -= *b;
+ tmp = (*a - *d) >> 1;
+ *b = tmp - *b;
+ *c = tmp - *c;
+ *a -= *b;
+ *d += *c;
+}
+
+typedef struct DaalaDSP {
+
+ /* Intra-block prediction */
+ void (*intrapred)(uint8_t *pred, const uint8_t *ref,
+ int stride, int x, int y,
+ enum DaalaBsize *bsize, int bstride,
+ enum DaalaBsize bs);
+
+ /* Chroma from luma */
+ void (*cfl)(uint8_t *dst, int dstride, const uint8_t *src,
+ int istride, int xdec, int ydec, int bs, int chroma_bs);
+
+ /* Pre-split filter */
+ void (*pre_split_filter)(uint8_t *blk, int stride, int bs,
+ int hfilter, int vfilter);
+
+ /* Post-split filter */
+ void (*post_split_filter)(uint8_t *blk, int stride, int bs,
+ int hfilter, int vfilter);
+
+ /* Frame postfilter */
+ void (*frame_postfilter)(uint8_t *blk, int stride, int sbn_x, int sbn_y,
+ int xdec, int ydec);
+
+ /* IDWT for the Haar wavelets */
+ void (*idwt)(uint8_t *dst, const int dst_stride, const uint8_t *src,
+ const int src_stride, const int ln);
+
+ /* IDCT transforms */
+    void (*idct[DAALA_NBSIZES])(uint8_t *dst, int dstride, const uint8_t *src,
+                                int istride);
+
+} DaalaDSP;
+
+int ff_daaladsp_init(DaalaDSP *d, int bit_depth);
+
+#endif /* AVCODEC_DAALADSP_H */
diff --git a/libavcodec/daalatab.c b/libavcodec/daalatab.c
new file mode 100644
index 0000000..0c75057
--- /dev/null
+++ b/libavcodec/daalatab.c
@@ -0,0 +1,1570 @@
+/*
+ * Daala video decoder
+ *
+ * Copyright (C) 2015-2016 Rostislav Pehlivanov <atomnuker at gmail.com>
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * This file incorporates work covered by the following copyright and
+ * permission notice:
+ *
+ * Copyright 2001-2015 Xiph.Org and contributors.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+ * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "daalatab.h"
+
+const struct DaalaPixFmts ff_daala_valid_formats[] = {
+ {AV_PIX_FMT_YUV420P, 3, 8, 1, {{0,0}, {1,1}, {1,1}, {0,0}}},
+ {AV_PIX_FMT_YUV444P, 3, 8, 1, {{0,0}, {0,0}, {0,0}, {0,0}}}
+};
+const int ff_daala_valid_formats_num = FF_ARRAY_ELEMS(ff_daala_valid_formats);
+
+/* Haar "quantization matrix" for each decomposition level */
+const uint8_t ff_daala_haar_qm[][DAALA_LOG_BSIZE_MAX] = {
+ {16, 16, 16, 16, 24, 32}, /* horizontal/vertical direction. */
+ {16, 16, 16, 24, 32, 48}, /* "diagonal" direction. */
+};
+
+/* Keyframe blur filter strength for every plane */
+const uint8_t ff_daala_bilinear_blur[] = {5, 20, 20, 5};
+
+/* Haar basis scaling compensation, [0] - x,y; [1] - diag */
+const uint8_t ff_daala_dc_comp[][2] = { {21, 25}, {18, 20}, {17, 18}, {17, 17} };
+
+/* Flat (e.g. PSNR) QM */
+const uint8_t daala_qm_flat[] = {
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16
+};
+
+/* HVS quantization matrix */
+static const uint8_t daala_qm_hvs[] = {
+ 16, 16, 18, 21, 24, 28, 32, 36,
+ 16, 17, 20, 21, 24, 27, 31, 35,
+ 18, 20, 24, 25, 27, 31, 33, 38,
+ 21, 21, 25, 28, 30, 34, 37, 42,
+ 24, 24, 27, 30, 34, 38, 43, 49,
+ 28, 27, 31, 34, 38, 44, 50, 58,
+ 32, 31, 33, 37, 43, 50, 58, 68,
+ 36, 35, 38, 42, 49, 58, 68, 78
+};
+
+const uint8_t *const ff_daala_qmatrices[] = {
+ daala_qm_flat,
+ daala_qm_hvs
+};
+const int ff_daala_qmatrices_num = FF_ARRAY_ELEMS(ff_daala_qmatrices);
+
+/* Chroma from luma scaling */
+const uint8_t ff_daaladsp_cfl_scale[4][4] = {
+ { 128, 128, 100, 36 },
+ { 128, 80, 71, 35 },
+ { 100, 71, 35, 31 },
+ { 36, 35, 31, 18 },
+};
+
+/* Generator: "trunc(e(((coded_quantizer)-6.235)*.10989525)*(1<<4))"
+ * See the comment on OD_CODED_QUANTIZER_MAP_Q4 in libdaala's quantizers.c;
+ * a disabled reference sketch follows the table below. */
+const int ff_daala_quant_codemap[] = {
+ 0x0000, /* 0 (lossless) */
+ 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000F, /* 1 */
+ 0x0011, 0x0013, 0x0015, 0x0018, 0x001B, 0x001E, /* 7 */
+ 0x0021, 0x0024, 0x0029, 0x002E, 0x0034, 0x003A, /* 13 */
+ 0x0041, 0x0048, 0x0051, 0x005A, 0x0064, 0x0070, /* 19 */
+ 0x007D, 0x008C, 0x009C, 0x00AE, 0x00C3, 0x00D9, /* 25 */
+ 0x00F3, 0x010F, 0x012F, 0x0152, 0x0179, 0x01A5, /* 31 */
+ 0x01D6, 0x020D, 0x0249, 0x028E, 0x02DA, 0x032E, /* 37 */
+ 0x038D, 0x03F7, 0x046D, 0x04F0, 0x0583, 0x0627, /* 43 */
+    0x06DE, 0x07AA, 0x088E, 0x098D, 0x0AA9, 0x0BE6, /* 49 */
+ 0x0D48, 0x0ED3, 0x108C, 0x1278, 0x149D, 0x1702, /* 55 */
+ 0x19AE, 0x1CAA, 0x1FFF /* 61 */
+};
+const int ff_daala_quant_codemap_size = FF_ARRAY_ELEMS(ff_daala_quant_codemap);
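+
+/* Disabled reference sketch (not part of the decoder): one way a table
+ * following the generator formula above could be rebuilt; it would need
+ * <math.h>, and the final entry is pinned to 0x1FFF since the raw formula
+ * lands one short of the value listed above. */
+#if 0
+static void daala_gen_quant_codemap(int *map, int nb)
+{
+    int i;
+    map[0] = 0; /* lossless */
+    for (i = 1; i < nb; i++)
+        map[i] = (int)(exp((i - 6.235) * 0.10989525) * (1 << 4));
+    map[nb - 1] = 0x1FFF; /* assumption: saturated by hand in the table */
+}
+#endif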
+
+/* Probability distribution functions: concatenated uniform CDFs in Q15
+ * (each terminated by 32768) for alphabet sizes 2 through 16 */
+const ent_rng ff_daalaent_cdf_tab[] = {
+ 16384, 32768, 10923, 21845, 32768, 8192, 16384, 24576, 32768, 6554, 13107,
+ 19661, 26214, 32768, 5461, 10923, 16384, 21845, 27307, 32768, 4681, 9362,
+ 14043, 18725, 23406, 28087, 32768, 4096, 8192, 12288, 16384, 20480, 24576,
+ 28672, 32768, 3641, 7282, 10923, 14564, 18204, 21845, 25486, 29127, 32768,
+ 3277, 6554, 9830, 13107, 16384, 19661, 22938, 26214, 29491, 32768, 2979,
+ 5958, 8937, 11916, 14895, 17873, 20852, 23831, 26810, 29789, 32768, 2731,
+ 5461, 8192, 10923, 13653, 16384, 19115, 21845, 24576, 27307, 30037, 32768,
+ 2521, 5041, 7562, 10082, 12603, 15124, 17644, 20165, 22686, 25206, 27727,
+    30247, 32768, 2341, 4681, 7022, 9362, 11703, 14043, 16384, 18725, 21065,
+ 23406, 25746, 28087, 30427, 32768, 2185, 4369, 6554, 8738, 10923, 13107,
+ 15292, 17476, 19661, 21845, 24030, 26214, 28399, 30583, 32768, 2048, 4096,
+ 6144, 8192, 10240, 12288, 14336, 16384, 18432, 20480, 22528, 24576, 26624,
+ 28672, 30720, 32768
+};
+
+/* TODO: maybe generate at runtime if cheap enough? */
+const ent_rng ff_daalaent_cdf_exp_tab[][16] = {
+ {32753,32754,32755,32756,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {32499,32753,32755,32756,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {32243,32747,32755,32756,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {31987,32737,32755,32756,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {31732,32724,32755,32756,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {31476,32706,32754,32756,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {31220,32684,32753,32756,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {30964,32658,32751,32756,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {30708,32628,32748,32756,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {30452,32594,32745,32756,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {30198,32558,32742,32756,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {29941,32515,32736,32755,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {29686,32470,32731,32755,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {29429,32419,32723,32754,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {29174,32366,32715,32753,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {28918,32308,32705,32752,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {28662,32246,32694,32750,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {28406,32180,32681,32748,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {28150,32110,32667,32745,32756,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {27894,32036,32651,32742,32756,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {27639,31959,32634,32739,32755,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {27383,31877,32614,32735,32755,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {27126,31790,32592,32730,32754,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {26871,31701,32569,32725,32753,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {26615,31607,32543,32719,32752,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {26361,31511,32517,32713,32751,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {26104,31408,32485,32704,32748,32757,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {25848,31302,32452,32695,32746,32757,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {25591,31191,32416,32684,32743,32756,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {25336,31078,32379,32674,32741,32756,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {25080,30960,32338,32661,32737,32755,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {24824,30838,32295,32648,32733,32754,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {24568,30712,32248,32632,32728,32752,32758,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {24313,30583,32199,32616,32723,32751,32758,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {24057,30449,32147,32598,32718,32750,32758,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {23801,30311,32091,32578,32711,32747,32757,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {23546,30170,32033,32557,32704,32745,32757,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {23288,30022,31969,32532,32695,32742,32756,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {23033,29873,31904,32507,32686,32739,32755,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {22778,29720,31835,32479,32675,32735,32753,32759,32761,32762,32763,32764,32765,32766,32767,32768},
+ {22521,29561,31761,32449,32664,32731,32752,32759,32761,32762,32763,32764,32765,32766,32767,32768},
+ {22267,29401,31686,32418,32652,32727,32751,32759,32761,32762,32763,32764,32765,32766,32767,32768},
+ {22011,29235,31605,32383,32638,32722,32749,32758,32761,32762,32763,32764,32765,32766,32767,32768},
+ {21754,29064,31520,32345,32622,32715,32746,32757,32761,32762,32763,32764,32765,32766,32767,32768},
+ {21501,28893,31434,32307,32607,32710,32745,32757,32761,32762,32763,32764,32765,32766,32767,32768},
+ {21243,28713,31339,32262,32587,32701,32741,32755,32760,32762,32763,32764,32765,32766,32767,32768},
+ {20988,28532,31243,32217,32567,32693,32738,32754,32760,32762,32763,32764,32765,32766,32767,32768},
+ {20730,28344,31140,32167,32544,32682,32733,32752,32759,32762,32763,32764,32765,32766,32767,32768},
+ {20476,28156,31036,32116,32521,32673,32730,32751,32759,32762,32763,32764,32765,32766,32767,32768},
+ {20220,27962,30926,32061,32495,32661,32725,32749,32758,32762,32763,32764,32765,32766,32767,32768},
+ {19963,27763,30810,32000,32465,32647,32718,32746,32757,32761,32763,32764,32765,32766,32767,32768},
+ {19708,27562,30691,31938,32435,32633,32712,32743,32756,32761,32763,32764,32765,32766,32767,32768},
+ {19454,27358,30569,31873,32403,32618,32705,32741,32755,32761,32763,32764,32765,32766,32767,32768},
+ {19196,27146,30438,31801,32365,32599,32696,32736,32753,32760,32763,32764,32765,32766,32767,32768},
+ {18942,26934,30306,31728,32328,32581,32688,32733,32752,32760,32763,32764,32765,32766,32767,32768},
+ {18684,26714,30164,31647,32284,32558,32676,32727,32749,32758,32762,32764,32765,32766,32767,32768},
+ {18429,26493,30021,31565,32240,32535,32664,32721,32746,32757,32762,32764,32765,32766,32767,32768},
+ {18174,26268,29872,31477,32192,32510,32652,32715,32743,32756,32762,32764,32765,32766,32767,32768},
+ {17920,26040,29719,31386,32141,32483,32638,32708,32740,32754,32761,32764,32765,32766,32767,32768},
+ {17661,25803,29556,31286,32083,32451,32620,32698,32734,32751,32759,32763,32765,32766,32767,32768},
+ {17406,25566,29391,31184,32024,32418,32603,32690,32731,32750,32759,32763,32765,32766,32767,32768},
+ {17151,25325,29220,31076,31961,32383,32584,32680,32726,32748,32758,32763,32765,32766,32767,32768},
+ {16896,25080,29044,30964,31894,32344,32562,32668,32719,32744,32756,32762,32765,32766,32767,32768},
+ {16639,24829,28860,30844,31821,32302,32539,32655,32712,32740,32754,32761,32764,32766,32767,32768},
+ {16384,24576,28672,30720,31744,32256,32512,32640,32704,32736,32752,32760,32764,32766,32767,32768},
+ {16130,24320,28479,30591,31663,32208,32485,32625,32696,32732,32750,32759,32764,32766,32767,32768},
+ {15872,24056,28276,30452,31574,32152,32450,32604,32683,32724,32745,32756,32762,32765,32766,32768},
+ {15615,23789,28068,30308,31480,32094,32415,32583,32671,32717,32741,32754,32761,32764,32766,32768},
+ {15361,23521,27856,30159,31382,32032,32377,32560,32657,32709,32737,32752,32760,32764,32766,32768},
+ {15103,23245,27634,30000,31275,31963,32334,32534,32642,32700,32731,32748,32757,32762,32765,32768},
+ {14848,22968,27409,29837,31165,31891,32288,32505,32624,32689,32725,32744,32755,32761,32764,32768},
+ {14592,22686,27176,29666,31047,31813,32238,32474,32605,32678,32718,32740,32752,32759,32763,32768},
+ {14336,22400,26936,29488,30923,31730,32184,32439,32583,32664,32709,32735,32749,32757,32762,32768},
+ {14079,22109,26689,29301,30791,31641,32125,32401,32559,32649,32700,32729,32746,32756,32761,32768},
+ {13825,21817,26437,29108,30652,31545,32061,32359,32532,32632,32690,32723,32742,32753,32759,32768},
+ {13568,21518,26176,28905,30504,31441,31990,32312,32501,32611,32676,32714,32736,32749,32757,32768},
+ {13314,21218,25911,28697,30351,31333,31916,32262,32468,32590,32662,32705,32731,32746,32755,32768},
+ {13054,20908,25633,28475,30185,31214,31833,32205,32429,32564,32645,32694,32723,32741,32752,32768},
+ {12803,20603,25356,28252,30017,31093,31748,32147,32390,32538,32628,32683,32717,32737,32749,32768},
+ {12544,20286,25064,28013,29833,30956,31649,32077,32341,32504,32605,32667,32705,32729,32744,32768},
+ {12288,19968,24768,27768,29643,30815,31547,32005,32291,32470,32582,32652,32696,32723,32740,32768},
+ {12033,19647,24465,27514,29443,30664,31437,31926,32235,32431,32555,32633,32683,32714,32734,32768},
+ {11777,19321,24154,27250,29233,30504,31318,31839,32173,32387,32524,32612,32668,32704,32727,32768},
+ {11521,18991,23835,26976,29013,30334,31190,31745,32105,32338,32489,32587,32651,32692,32719,32768},
+ {11265,18657,23508,26691,28780,30151,31051,31641,32028,32282,32449,32559,32631,32678,32709,32768},
+ {11006,18316,23170,26394,28535,29957,30901,31528,31944,32220,32404,32526,32607,32661,32697,32768},
+ {10752,17976,22830,26091,28282,29754,30743,31408,31854,32154,32356,32491,32582,32643,32684,32768},
+ {10496,17630,22479,25775,28015,29538,30573,31276,31754,32079,32300,32450,32552,32621,32668,32768},
+ {10240,17280,22120,25448,27736,29309,30390,31133,31644,31995,32237,32403,32517,32595,32649,32768},
+ { 9984,16926,21753,25109,27443,29066,30194,30978,31523,31902,32166,32349,32476,32565,32627,32768},
+ { 9728,16568,21377,24759,27137,28809,29984,30811,31392,31801,32088,32290,32432,32532,32602,32768},
+ { 9474,16208,20995,24399,26819,28539,29762,30631,31249,31688,32000,32222,32380,32492,32572,32768},
+ { 9216,15840,20601,24023,26483,28251,29522,30435,31091,31563,31902,32146,32321,32447,32537,32768},
+ { 8959,15469,20199,23636,26133,27947,29265,30223,30919,31425,31792,32059,32253,32394,32496,32768},
+ { 8705,15097,19791,23238,25770,27629,28994,29997,30733,31274,31671,31963,32177,32334,32449,32768},
+ { 8449,14719,19373,22827,25390,27292,28704,29752,30530,31107,31535,31853,32089,32264,32394,32768},
+ { 8192,14336,18944,22400,24992,26936,28394,29488,30308,30923,31384,31730,31989,32184,32330,32768},
+ { 7936,13950,18507,21961,24578,26561,28064,29203,30066,30720,31216,31592,31877,32093,32256,32768},
+ { 7678,13558,18060,21507,24146,26166,27713,28897,29804,30498,31030,31437,31749,31988,32171,32768},
+ { 7423,13165,17606,21041,23698,25753,27342,28571,29522,30257,30826,31266,31606,31869,32073,32768},
+ { 7168,12768,17143,20561,23231,25317,26947,28220,29215,29992,30599,31073,31444,31734,31960,32768},
+ { 6911,12365,16669,20065,22744,24858,26526,27842,28881,29701,30348,30858,31261,31579,31830,32768},
+ { 6657,11961,16188,19556,22240,24379,26083,27441,28523,29385,30072,30620,31056,31404,31681,32768},
+ { 6400,11550,15694,19029,21712,23871,25609,27007,28132,29037,29766,30352,30824,31204,31509,32768},
+ { 6142,11134,15190,18486,21164,23340,25108,26544,27711,28659,29429,30055,30564,30977,31313,32768},
+ { 5890,10720,14682,17932,20598,22785,24579,26051,27258,28248,29060,29726,30273,30721,31089,32768},
+ { 5631,10295,14157,17356,20005,22199,24016,25520,26766,27798,28652,29359,29945,30430,30832,32768},
+ { 5377, 9871,13628,16768,19393,21587,23421,24954,26236,27308,28204,28953,29579,30102,30539,32768},
+ { 5121, 9441,13086,16161,18756,20945,22792,24351,25666,26776,27712,28502,29169,29731,30206,32768},
+ { 4865, 9007,12534,15538,18096,20274,22129,23708,25053,26198,27173,28004,28711,29313,29826,32768},
+ { 4608, 8568,11971,14896,17409,19569,21425,23020,24391,25569,26581,27451,28199,28842,29394,32768},
+ { 4351, 8125,11398,14236,16697,18831,20682,22287,23679,24886,25933,26841,27628,28311,28903,32768},
+ { 4096, 7680,10816,13560,15961,18062,19900,21508,22915,24146,25224,26167,26992,27714,28346,32768},
+ { 3840, 7230,10223,12865,15197,17256,19074,20679,22096,23347,24451,25426,26287,27047,27718,32768},
+ { 3584, 6776, 9619,12151,14406,16414,18203,19796,21215,22479,23604,24606,25499,26294,27002,32768},
+ { 3328, 6318, 9004,11417,13585,15533,17283,18856,20269,21538,22678,23703,24624,25451,26194,32768},
+ { 3072, 5856, 8379,10665,12737,14615,16317,17859,19257,20524,21672,22712,23655,24509,25283,32768},
+ { 2816, 5390, 7743, 9894,11860,13657,15299,16800,18172,19426,20573,21621,22579,23455,24255,32768},
+ { 2560, 4920, 7096, 9102,10951,12656,14227,15676,17011,18242,19377,20423,21388,22277,23097,32768},
+ { 2304, 4446, 6437, 8288,10009,11609,13097,14480,15766,16961,18072,19105,20066,20959,21789,32768},
+ { 2048, 3968, 5768, 7456, 9038,10521,11911,13215,14437,15583,16657,17664,18608,19493,20323,32768},
+ { 1792, 3486, 5087, 6601, 8032, 9385,10664,11873,13016,14096,15117,16082,16995,17858,18673,32768},
+ { 1536, 3000, 4395, 5725, 6993, 8201, 9353,10451,11497,12494,13444,14350,15213,16036,16820,32768},
+ { 1280, 2510, 3692, 4828, 5919, 6968, 7976, 8944, 9875,10769,11628,12454,13248,14011,14744,32768},
+ { 1024, 2016, 2977, 3908, 4810, 5684, 6530, 7350, 8144, 8913, 9658,10380,11080,11758,12415,32768},
+ { 768, 1518, 2250, 2965, 3663, 4345, 5011, 5662, 6297, 6917, 7523, 8115, 8693, 9257, 9808,32768},
+ { 512, 1016, 1512, 2000, 2481, 2954, 3420, 3879, 4330, 4774, 5211, 5642, 6066, 6483, 6894,32768},
+ { 256, 510, 762, 1012, 1260, 1506, 1750, 1992, 2232, 2471, 2708, 2943, 3176, 3407, 3636,32768},
+};
+
+const ent_rng ff_daalaent_laplace_offset[] = {
+ 0, 29871, 28672, 27751, 26975, 26291, 25673, 25105,
+ 24576, 24079, 23609, 23162, 22734, 22325, 21931, 21550,
+ 21182, 20826, 20480, 20143, 19815, 19495, 19183, 18877,
+ 18579, 18286, 17999, 17718, 17442, 17170, 16904, 16642,
+ 16384, 16129, 15879, 15633, 15390, 15150, 14913, 14680,
+ 14450, 14222, 13997, 13775, 13556, 13338, 13124, 12911,
+ 12701, 12493, 12288, 12084, 11882, 11682, 11484, 11288,
+ 11094, 10901, 10710, 10521, 10333, 10147, 9962, 9779,
+ 9597, 9417, 9238, 9060, 8884, 8709, 8535, 8363,
+ 8192, 8021, 7853, 7685, 7518, 7352, 7188, 7025,
+ 6862, 6701, 6540, 6381, 6222, 6065, 5908, 5753,
+ 5598, 5444, 5291, 5138, 4987, 4837, 4687, 4538,
+ 4390, 4242, 4096, 3950, 3804, 3660, 3516, 3373,
+ 3231, 3089, 2948, 2808, 2668, 2529, 2391, 2253,
+ 2116, 1979, 1843, 1708, 1573, 1439, 1306, 1172,
+ 1040, 908, 777, 646, 516, 386, 257, 128,
+};
+
+static const uint8_t daala_zigzag4x4[][2] = {
+ { 0, 1 }, { 1, 0 }, { 1, 1 }, { 0, 2 },
+ { 2, 0 }, { 1, 2 }, { 0, 3 }, { 2, 1 },
+ { 3, 0 }, { 1, 3 }, { 3, 1 }, { 2, 2 },
+ { 2, 3 }, { 3, 2 }, { 3, 3 }
+};
+
+static const uint8_t daala_zigzag8x8[][2] = {
+ { 4, 0 }, { 5, 0 }, { 4, 1 }, { 6, 0 },
+ { 5, 1 }, { 7, 0 }, { 6, 1 }, { 7, 1 },
+ { 0, 4 }, { 0, 5 }, { 1, 4 }, { 0, 6 },
+ { 1, 5 }, { 0, 7 }, { 1, 6 }, { 1, 7 },
+ { 2, 4 }, { 4, 2 }, { 2, 5 }, { 3, 4 },
+ { 4, 3 }, { 5, 2 }, { 3, 5 }, { 2, 6 },
+ { 4, 4 }, { 5, 3 }, { 2, 7 }, { 6, 2 },
+ { 3, 6 }, { 4, 5 }, { 7, 2 }, { 5, 4 },
+ { 6, 3 }, { 3, 7 }, { 5, 5 }, { 7, 3 },
+ { 4, 6 }, { 6, 4 }, { 4, 7 }, { 5, 6 },
+ { 7, 4 }, { 6, 5 }, { 5, 7 }, { 7, 5 },
+ { 6, 6 }, { 6, 7 }, { 7, 6 }, { 7, 7 }
+};
+
+static const uint8_t daala_zigzag16x16[][2] = {
+ { 8, 0 }, { 9, 0 }, { 8, 1 }, { 10, 0 },
+ { 9, 1 }, { 11, 0 }, { 8, 2 }, { 10, 1 },
+ { 12, 0 }, { 8, 3 }, { 9, 2 }, { 13, 0 },
+ { 11, 1 }, { 9, 3 }, { 10, 2 }, { 14, 0 },
+ { 15, 0 }, { 12, 1 }, { 10, 3 }, { 11, 2 },
+ { 13, 1 }, { 11, 3 }, { 12, 2 }, { 14, 1 },
+ { 15, 1 }, { 13, 2 }, { 12, 3 }, { 14, 2 },
+ { 13, 3 }, { 15, 2 }, { 14, 3 }, { 15, 3 },
+ { 0, 8 }, { 0, 9 }, { 1, 8 }, { 0, 10 },
+ { 1, 9 }, { 2, 8 }, { 0, 11 }, { 1, 10 },
+ { 0, 12 }, { 3, 8 }, { 2, 9 }, { 0, 13 },
+ { 1, 11 }, { 2, 10 }, { 3, 9 }, { 0, 14 },
+ { 1, 12 }, { 0, 15 }, { 2, 11 }, { 3, 10 },
+ { 1, 13 }, { 2, 12 }, { 1, 14 }, { 3, 11 },
+ { 1, 15 }, { 2, 13 }, { 3, 12 }, { 2, 14 },
+ { 2, 15 }, { 3, 13 }, { 3, 14 }, { 3, 15 },
+ { 4, 8 }, { 8, 4 }, { 5, 8 }, { 4, 9 },
+ { 8, 5 }, { 9, 4 }, { 5, 9 }, { 6, 8 },
+ { 4, 10 }, { 8, 6 }, { 9, 5 }, { 5, 10 },
+ { 6, 9 }, { 10, 4 }, { 7, 8 }, { 4, 11 },
+ { 8, 7 }, { 9, 6 }, { 10, 5 }, { 5, 11 },
+ { 7, 9 }, { 6, 10 }, { 4, 12 }, { 8, 8 },
+ { 11, 4 }, { 9, 7 }, { 10, 6 }, { 4, 13 },
+ { 6, 11 }, { 7, 10 }, { 11, 5 }, { 5, 12 },
+ { 8, 9 }, { 9, 8 }, { 12, 4 }, { 10, 7 },
+ { 4, 14 }, { 5, 13 }, { 11, 6 }, { 4, 15 },
+ { 6, 12 }, { 7, 11 }, { 8, 10 }, { 12, 5 },
+ { 9, 9 }, { 13, 4 }, { 10, 8 }, { 5, 14 },
+ { 11, 7 }, { 6, 13 }, { 5, 15 }, { 7, 12 },
+ { 14, 4 }, { 13, 5 }, { 12, 6 }, { 8, 11 },
+ { 9, 10 }, { 15, 4 }, { 10, 9 }, { 6, 14 },
+ { 11, 8 }, { 7, 13 }, { 14, 5 }, { 6, 15 },
+ { 12, 7 }, { 13, 6 }, { 8, 12 }, { 9, 11 },
+ { 15, 5 }, { 10, 10 }, { 7, 14 }, { 11, 9 },
+ { 13, 7 }, { 14, 6 }, { 7, 15 }, { 12, 8 },
+ { 8, 13 }, { 9, 12 }, { 15, 6 }, { 10, 11 },
+ { 8, 14 }, { 11, 10 }, { 14, 7 }, { 13, 8 },
+ { 12, 9 }, { 8, 15 }, { 9, 13 }, { 15, 7 },
+ { 10, 12 }, { 11, 11 }, { 14, 8 }, { 13, 9 },
+ { 9, 14 }, { 12, 10 }, { 15, 8 }, { 9, 15 },
+ { 10, 13 }, { 11, 12 }, { 12, 11 }, { 14, 9 },
+ { 13, 10 }, { 10, 14 }, { 15, 9 }, { 10, 15 },
+ { 11, 13 }, { 14, 10 }, { 12, 12 }, { 13, 11 },
+ { 15, 10 }, { 11, 14 }, { 11, 15 }, { 12, 13 },
+ { 14, 11 }, { 13, 12 }, { 15, 11 }, { 12, 14 },
+ { 13, 13 }, { 14, 12 }, { 12, 15 }, { 15, 12 },
+ { 13, 14 }, { 14, 13 }, { 13, 15 }, { 15, 13 },
+ { 14, 14 }, { 14, 15 }, { 15, 14 }, { 15, 15 }
+};
+
+static const uint8_t daala_zigzag32x32[][2] = {
+ { 16, 0 }, { 17, 0 }, { 18, 0 }, { 19, 0 },
+ { 16, 1 }, { 17, 1 }, { 20, 0 }, { 16, 2 },
+ { 18, 1 }, { 21, 0 }, { 17, 2 }, { 16, 3 },
+ { 19, 1 }, { 22, 0 }, { 18, 2 }, { 17, 3 },
+ { 20, 1 }, { 16, 4 }, { 23, 0 }, { 19, 2 },
+ { 24, 0 }, { 16, 5 }, { 21, 1 }, { 17, 4 },
+ { 18, 3 }, { 20, 2 }, { 17, 5 }, { 16, 6 },
+ { 19, 3 }, { 18, 4 }, { 25, 0 }, { 22, 1 },
+ { 16, 7 }, { 21, 2 }, { 17, 6 }, { 20, 3 },
+ { 26, 0 }, { 18, 5 }, { 19, 4 }, { 17, 7 },
+ { 23, 1 }, { 22, 2 }, { 18, 6 }, { 27, 0 },
+ { 19, 5 }, { 24, 1 }, { 21, 3 }, { 28, 0 },
+ { 20, 4 }, { 18, 7 }, { 19, 6 }, { 23, 2 },
+ { 29, 0 }, { 25, 1 }, { 21, 4 }, { 30, 0 },
+ { 20, 5 }, { 22, 3 }, { 31, 0 }, { 19, 7 },
+ { 24, 2 }, { 26, 1 }, { 20, 6 }, { 21, 5 },
+ { 22, 4 }, { 23, 3 }, { 27, 1 }, { 25, 2 },
+ { 20, 7 }, { 28, 1 }, { 24, 3 }, { 21, 6 },
+ { 22, 5 }, { 23, 4 }, { 26, 2 }, { 21, 7 },
+ { 29, 1 }, { 25, 3 }, { 30, 1 }, { 27, 2 },
+ { 22, 6 }, { 23, 5 }, { 31, 1 }, { 24, 4 },
+ { 26, 3 }, { 28, 2 }, { 22, 7 }, { 23, 6 },
+ { 25, 4 }, { 24, 5 }, { 29, 2 }, { 30, 2 },
+ { 27, 3 }, { 23, 7 }, { 31, 2 }, { 24, 6 },
+ { 26, 4 }, { 25, 5 }, { 28, 3 }, { 24, 7 },
+ { 27, 4 }, { 29, 3 }, { 25, 6 }, { 26, 5 },
+ { 30, 3 }, { 31, 3 }, { 28, 4 }, { 27, 5 },
+ { 25, 7 }, { 29, 4 }, { 26, 6 }, { 28, 5 },
+ { 30, 4 }, { 26, 7 }, { 27, 6 }, { 31, 4 },
+ { 29, 5 }, { 27, 7 }, { 30, 5 }, { 28, 6 },
+ { 31, 5 }, { 29, 6 }, { 28, 7 }, { 30, 6 },
+ { 31, 6 }, { 29, 7 }, { 30, 7 }, { 31, 7 },
+ { 0, 16 }, { 0, 17 }, { 1, 16 }, { 0, 18 },
+ { 1, 17 }, { 0, 19 }, { 2, 16 }, { 1, 18 },
+ { 0, 20 }, { 2, 17 }, { 3, 16 }, { 1, 19 },
+ { 2, 18 }, { 0, 21 }, { 3, 17 }, { 4, 16 },
+ { 1, 20 }, { 2, 19 }, { 0, 22 }, { 3, 18 },
+ { 4, 17 }, { 5, 16 }, { 0, 23 }, { 3, 19 },
+ { 2, 20 }, { 1, 21 }, { 4, 18 }, { 6, 16 },
+ { 5, 17 }, { 3, 20 }, { 2, 21 }, { 1, 22 },
+ { 0, 24 }, { 0, 25 }, { 4, 19 }, { 7, 16 },
+ { 6, 17 }, { 5, 18 }, { 0, 26 }, { 3, 21 },
+ { 2, 22 }, { 1, 23 }, { 4, 20 }, { 5, 19 },
+ { 6, 18 }, { 1, 24 }, { 7, 17 }, { 0, 27 },
+ { 2, 23 }, { 3, 22 }, { 4, 21 }, { 1, 25 },
+ { 5, 20 }, { 7, 18 }, { 0, 28 }, { 6, 19 },
+ { 2, 24 }, { 1, 26 }, { 0, 29 }, { 4, 22 },
+ { 3, 23 }, { 2, 25 }, { 5, 21 }, { 0, 31 },
+ { 7, 19 }, { 6, 20 }, { 0, 30 }, { 1, 27 },
+ { 3, 24 }, { 2, 26 }, { 4, 23 }, { 5, 22 },
+ { 7, 20 }, { 1, 28 }, { 6, 21 }, { 3, 25 },
+ { 2, 27 }, { 1, 29 }, { 4, 24 }, { 2, 28 },
+ { 1, 30 }, { 7, 21 }, { 5, 23 }, { 3, 26 },
+ { 6, 22 }, { 1, 31 }, { 4, 25 }, { 7, 22 },
+ { 3, 27 }, { 2, 29 }, { 2, 30 }, { 5, 24 },
+ { 2, 31 }, { 6, 23 }, { 4, 26 }, { 3, 28 },
+ { 5, 25 }, { 3, 29 }, { 6, 24 }, { 7, 23 },
+ { 3, 30 }, { 4, 27 }, { 3, 31 }, { 5, 26 },
+ { 6, 25 }, { 4, 28 }, { 7, 24 }, { 4, 29 },
+ { 5, 27 }, { 4, 30 }, { 4, 31 }, { 6, 26 },
+ { 5, 28 }, { 7, 25 }, { 6, 27 }, { 5, 29 },
+ { 7, 26 }, { 5, 30 }, { 5, 31 }, { 6, 28 },
+ { 7, 27 }, { 6, 29 }, { 6, 30 }, { 7, 28 },
+ { 6, 31 }, { 7, 29 }, { 7, 30 }, { 7, 31 },
+ { 8, 16 }, { 9, 16 }, { 8, 17 }, { 10, 16 },
+ { 9, 17 }, { 16, 8 }, { 8, 18 }, { 16, 9 },
+ { 10, 17 }, { 11, 16 }, { 17, 8 }, { 9, 18 },
+ { 8, 19 }, { 16, 10 }, { 11, 17 }, { 12, 16 },
+ { 10, 18 }, { 17, 9 }, { 9, 19 }, { 16, 11 },
+ { 8, 20 }, { 18, 8 }, { 17, 10 }, { 10, 19 },
+ { 12, 17 }, { 11, 18 }, { 9, 20 }, { 16, 12 },
+ { 18, 9 }, { 8, 21 }, { 13, 16 }, { 17, 11 },
+ { 19, 8 }, { 18, 10 }, { 13, 17 }, { 16, 13 },
+ { 11, 19 }, { 12, 18 }, { 10, 20 }, { 17, 12 },
+ { 9, 21 }, { 19, 9 }, { 8, 22 }, { 14, 16 },
+ { 18, 11 }, { 11, 20 }, { 10, 21 }, { 20, 8 },
+ { 13, 18 }, { 16, 14 }, { 12, 19 }, { 17, 13 },
+ { 19, 10 }, { 14, 17 }, { 9, 22 }, { 18, 12 },
+ { 8, 23 }, { 17, 14 }, { 20, 9 }, { 15, 16 },
+ { 16, 15 }, { 13, 19 }, { 10, 22 }, { 19, 11 },
+ { 11, 21 }, { 14, 18 }, { 12, 20 }, { 18, 13 },
+ { 20, 10 }, { 21, 8 }, { 15, 17 }, { 9, 23 },
+ { 19, 12 }, { 11, 22 }, { 8, 24 }, { 21, 9 },
+ { 17, 15 }, { 16, 16 }, { 14, 19 }, { 18, 14 },
+ { 12, 21 }, { 13, 20 }, { 20, 11 }, { 10, 23 },
+ { 19, 13 }, { 15, 18 }, { 16, 17 }, { 21, 10 },
+ { 22, 8 }, { 9, 24 }, { 8, 25 }, { 20, 12 },
+ { 15, 19 }, { 11, 23 }, { 17, 16 }, { 18, 15 },
+ { 14, 20 }, { 12, 22 }, { 10, 24 }, { 22, 9 },
+ { 21, 11 }, { 19, 14 }, { 13, 21 }, { 16, 18 },
+ { 9, 25 }, { 17, 17 }, { 8, 26 }, { 20, 13 },
+ { 23, 8 }, { 12, 23 }, { 13, 22 }, { 22, 10 },
+ { 19, 15 }, { 15, 20 }, { 16, 19 }, { 21, 12 },
+ { 11, 24 }, { 14, 21 }, { 8, 27 }, { 18, 16 },
+ { 10, 25 }, { 9, 26 }, { 22, 11 }, { 20, 14 },
+ { 23, 9 }, { 18, 17 }, { 17, 18 }, { 17, 19 },
+ { 19, 16 }, { 21, 13 }, { 10, 26 }, { 12, 24 },
+ { 23, 10 }, { 24, 8 }, { 8, 28 }, { 16, 20 },
+ { 9, 27 }, { 15, 21 }, { 22, 12 }, { 14, 22 },
+ { 13, 23 }, { 20, 15 }, { 11, 25 }, { 24, 9 },
+ { 18, 18 }, { 19, 17 }, { 23, 11 }, { 10, 27 },
+ { 8, 29 }, { 12, 25 }, { 9, 28 }, { 8, 30 },
+ { 21, 14 }, { 13, 24 }, { 11, 26 }, { 25, 8 },
+ { 24, 10 }, { 20, 16 }, { 19, 18 }, { 14, 23 },
+ { 22, 13 }, { 8, 31 }, { 17, 20 }, { 9, 29 },
+ { 23, 12 }, { 15, 22 }, { 25, 9 }, { 11, 27 },
+ { 10, 28 }, { 20, 17 }, { 21, 15 }, { 18, 19 },
+ { 16, 21 }, { 24, 11 }, { 9, 30 }, { 12, 26 },
+ { 10, 29 }, { 22, 14 }, { 14, 24 }, { 9, 31 },
+ { 26, 8 }, { 13, 25 }, { 25, 10 }, { 18, 20 },
+ { 19, 19 }, { 11, 28 }, { 15, 23 }, { 20, 18 },
+ { 10, 30 }, { 12, 27 }, { 17, 21 }, { 23, 13 },
+ { 24, 12 }, { 21, 16 }, { 16, 22 }, { 26, 9 },
+ { 27, 8 }, { 13, 26 }, { 22, 15 }, { 10, 31 },
+ { 14, 25 }, { 12, 28 }, { 25, 11 }, { 21, 17 },
+ { 26, 10 }, { 20, 19 }, { 11, 29 }, { 15, 24 },
+ { 23, 14 }, { 27, 9 }, { 11, 30 }, { 13, 27 },
+ { 19, 20 }, { 24, 13 }, { 28, 8 }, { 11, 31 },
+ { 22, 16 }, { 17, 22 }, { 16, 23 }, { 25, 12 },
+ { 18, 21 }, { 12, 29 }, { 21, 18 }, { 28, 9 },
+ { 27, 10 }, { 26, 11 }, { 29, 8 }, { 14, 26 },
+ { 15, 25 }, { 13, 28 }, { 12, 30 }, { 23, 15 },
+ { 30, 8 }, { 16, 24 }, { 13, 29 }, { 25, 13 },
+ { 24, 14 }, { 20, 20 }, { 31, 8 }, { 12, 31 },
+ { 14, 27 }, { 28, 10 }, { 26, 12 }, { 22, 17 },
+ { 21, 19 }, { 17, 23 }, { 18, 22 }, { 29, 9 },
+ { 27, 11 }, { 19, 21 }, { 27, 12 }, { 30, 9 },
+ { 31, 9 }, { 13, 30 }, { 24, 15 }, { 23, 16 },
+ { 15, 26 }, { 14, 28 }, { 29, 10 }, { 28, 11 },
+ { 26, 13 }, { 17, 24 }, { 13, 31 }, { 25, 14 },
+ { 22, 18 }, { 16, 25 }, { 30, 10 }, { 14, 29 },
+ { 15, 27 }, { 19, 22 }, { 21, 20 }, { 20, 21 },
+ { 27, 13 }, { 29, 11 }, { 18, 23 }, { 23, 17 },
+ { 16, 26 }, { 31, 10 }, { 24, 16 }, { 14, 30 },
+ { 22, 19 }, { 14, 31 }, { 28, 12 }, { 26, 14 },
+ { 30, 11 }, { 15, 28 }, { 25, 15 }, { 17, 25 },
+ { 23, 18 }, { 18, 24 }, { 15, 30 }, { 29, 12 },
+ { 31, 11 }, { 16, 27 }, { 24, 17 }, { 28, 13 },
+ { 19, 23 }, { 15, 29 }, { 25, 16 }, { 17, 26 },
+ { 27, 14 }, { 22, 20 }, { 15, 31 }, { 20, 22 },
+ { 21, 21 }, { 16, 28 }, { 17, 27 }, { 30, 12 },
+ { 26, 15 }, { 19, 24 }, { 18, 25 }, { 23, 19 },
+ { 29, 13 }, { 31, 12 }, { 24, 18 }, { 26, 16 },
+ { 25, 17 }, { 16, 29 }, { 28, 14 }, { 20, 23 },
+ { 18, 26 }, { 21, 22 }, { 19, 25 }, { 22, 21 },
+ { 27, 15 }, { 17, 28 }, { 16, 30 }, { 26, 17 },
+ { 23, 20 }, { 16, 31 }, { 25, 18 }, { 27, 16 },
+ { 20, 24 }, { 24, 19 }, { 31, 13 }, { 30, 13 },
+ { 29, 14 }, { 18, 27 }, { 28, 15 }, { 17, 29 },
+ { 19, 26 }, { 17, 30 }, { 21, 23 }, { 22, 22 },
+ { 30, 14 }, { 20, 25 }, { 23, 21 }, { 17, 31 },
+ { 18, 28 }, { 25, 19 }, { 24, 20 }, { 28, 16 },
+ { 31, 14 }, { 26, 18 }, { 19, 27 }, { 29, 15 },
+ { 27, 17 }, { 30, 15 }, { 21, 24 }, { 22, 23 },
+ { 26, 19 }, { 23, 22 }, { 28, 17 }, { 29, 16 },
+ { 18, 30 }, { 24, 21 }, { 25, 20 }, { 18, 31 },
+ { 18, 29 }, { 20, 26 }, { 19, 28 }, { 27, 18 },
+ { 31, 15 }, { 20, 27 }, { 30, 16 }, { 19, 29 },
+ { 29, 17 }, { 31, 16 }, { 27, 19 }, { 21, 25 },
+ { 28, 18 }, { 26, 20 }, { 22, 24 }, { 25, 21 },
+ { 19, 30 }, { 24, 22 }, { 30, 17 }, { 21, 26 },
+ { 23, 23 }, { 19, 31 }, { 20, 28 }, { 31, 17 },
+ { 28, 19 }, { 27, 20 }, { 21, 27 }, { 29, 18 },
+ { 30, 18 }, { 25, 22 }, { 26, 21 }, { 20, 29 },
+ { 22, 25 }, { 24, 23 }, { 29, 19 }, { 23, 24 },
+ { 20, 31 }, { 20, 30 }, { 28, 20 }, { 21, 28 },
+ { 22, 26 }, { 31, 18 }, { 27, 21 }, { 30, 19 },
+ { 22, 27 }, { 29, 20 }, { 23, 25 }, { 24, 24 },
+ { 26, 22 }, { 21, 29 }, { 25, 23 }, { 31, 19 },
+ { 21, 30 }, { 23, 26 }, { 28, 21 }, { 21, 31 },
+ { 22, 28 }, { 30, 20 }, { 25, 24 }, { 27, 22 },
+ { 29, 21 }, { 26, 23 }, { 24, 25 }, { 31, 20 },
+ { 23, 27 }, { 22, 29 }, { 30, 21 }, { 28, 22 },
+ { 24, 26 }, { 25, 25 }, { 27, 23 }, { 22, 30 },
+ { 23, 28 }, { 22, 31 }, { 26, 24 }, { 31, 21 },
+ { 24, 27 }, { 29, 22 }, { 27, 24 }, { 30, 22 },
+ { 25, 26 }, { 28, 23 }, { 23, 30 }, { 23, 29 },
+ { 24, 28 }, { 25, 27 }, { 31, 22 }, { 23, 31 },
+ { 26, 25 }, { 28, 24 }, { 29, 23 }, { 24, 29 },
+ { 24, 30 }, { 27, 25 }, { 25, 28 }, { 26, 26 },
+ { 30, 23 }, { 26, 27 }, { 31, 23 }, { 28, 25 },
+ { 27, 26 }, { 25, 29 }, { 24, 31 }, { 29, 24 },
+ { 30, 24 }, { 27, 27 }, { 29, 25 }, { 26, 28 },
+ { 31, 24 }, { 25, 30 }, { 25, 31 }, { 28, 26 },
+ { 27, 28 }, { 26, 29 }, { 30, 25 }, { 29, 26 },
+ { 28, 27 }, { 26, 30 }, { 31, 25 }, { 27, 29 },
+ { 26, 31 }, { 30, 26 }, { 28, 28 }, { 31, 26 },
+ { 29, 27 }, { 27, 30 }, { 28, 29 }, { 27, 31 },
+ { 30, 27 }, { 31, 27 }, { 28, 30 }, { 29, 28 },
+ { 30, 28 }, { 29, 29 }, { 30, 29 }, { 31, 28 },
+ { 28, 31 }, { 29, 30 }, { 29, 31 }, { 31, 29 },
+ { 30, 30 }, { 30, 31 }, { 31, 30 }, { 31, 31 }
+};
+
+static const uint8_t daala_zigzag64x64[][2] = {
+ { 32, 0 }, { 33, 0 }, { 34, 0 }, { 35, 0 },
+ { 32, 1 }, { 36, 0 }, { 33, 1 }, { 37, 0 },
+ { 34, 1 }, { 32, 2 }, { 38, 0 }, { 35, 1 },
+ { 33, 2 }, { 39, 0 }, { 36, 1 }, { 34, 2 },
+ { 40, 0 }, { 32, 3 }, { 37, 1 }, { 35, 2 },
+ { 41, 0 }, { 33, 3 }, { 38, 1 }, { 36, 2 },
+ { 42, 0 }, { 32, 4 }, { 34, 3 }, { 39, 1 },
+ { 37, 2 }, { 43, 0 }, { 35, 3 }, { 33, 4 },
+ { 40, 1 }, { 44, 0 }, { 38, 2 }, { 32, 5 },
+ { 36, 3 }, { 41, 1 }, { 34, 4 }, { 45, 0 },
+ { 39, 2 }, { 33, 5 }, { 42, 1 }, { 37, 3 },
+ { 35, 4 }, { 46, 0 }, { 40, 2 }, { 32, 6 },
+ { 43, 1 }, { 34, 5 }, { 38, 3 }, { 36, 4 },
+ { 47, 0 }, { 41, 2 }, { 33, 6 }, { 44, 1 },
+ { 39, 3 }, { 35, 5 }, { 37, 4 }, { 48, 0 },
+ { 32, 7 }, { 42, 2 }, { 34, 6 }, { 45, 1 },
+ { 40, 3 }, { 36, 5 }, { 38, 4 }, { 49, 0 },
+ { 33, 7 }, { 43, 2 }, { 46, 1 }, { 35, 6 },
+ { 41, 3 }, { 37, 5 }, { 39, 4 }, { 32, 8 },
+ { 50, 0 }, { 44, 2 }, { 34, 7 }, { 47, 1 },
+ { 36, 6 }, { 42, 3 }, { 38, 5 }, { 51, 0 },
+ { 40, 4 }, { 33, 8 }, { 45, 2 }, { 35, 7 },
+ { 48, 1 }, { 32, 9 }, { 37, 6 }, { 43, 3 },
+ { 52, 0 }, { 39, 5 }, { 41, 4 }, { 34, 8 },
+ { 46, 2 }, { 49, 1 }, { 36, 7 }, { 33, 9 },
+ { 44, 3 }, { 53, 0 }, { 38, 6 }, { 42, 4 },
+ { 40, 5 }, { 35, 8 }, { 32, 10 }, { 47, 2 },
+ { 50, 1 }, { 37, 7 }, { 54, 0 }, { 45, 3 },
+ { 34, 9 }, { 39, 6 }, { 43, 4 }, { 41, 5 },
+ { 48, 2 }, { 36, 8 }, { 51, 1 }, { 33, 10 },
+ { 55, 0 }, { 38, 7 }, { 46, 3 }, { 32, 11 },
+ { 40, 6 }, { 35, 9 }, { 44, 4 }, { 42, 5 },
+ { 49, 2 }, { 52, 1 }, { 37, 8 }, { 34, 10 },
+ { 56, 0 }, { 39, 7 }, { 47, 3 }, { 41, 6 },
+ { 33, 11 }, { 45, 4 }, { 36, 9 }, { 43, 5 },
+ { 50, 2 }, { 53, 1 }, { 32, 12 }, { 38, 8 },
+ { 57, 0 }, { 35, 10 }, { 48, 3 }, { 40, 7 },
+ { 42, 6 }, { 46, 4 }, { 44, 5 }, { 34, 11 },
+ { 37, 9 }, { 51, 2 }, { 54, 1 }, { 58, 0 },
+ { 39, 8 }, { 33, 12 }, { 36, 10 }, { 49, 3 },
+ { 41, 7 }, { 32, 13 }, { 47, 4 }, { 43, 6 },
+ { 45, 5 }, { 52, 2 }, { 55, 1 }, { 38, 9 },
+ { 35, 11 }, { 59, 0 }, { 40, 8 }, { 34, 12 },
+ { 50, 3 }, { 37, 10 }, { 42, 7 }, { 48, 4 },
+ { 44, 6 }, { 33, 13 }, { 56, 1 }, { 53, 2 },
+ { 46, 5 }, { 60, 0 }, { 39, 9 }, { 36, 11 },
+ { 32, 14 }, { 41, 8 }, { 51, 3 }, { 35, 12 },
+ { 38, 10 }, { 43, 7 }, { 49, 4 }, { 57, 1 },
+ { 54, 2 }, { 45, 6 }, { 47, 5 }, { 61, 0 },
+ { 34, 13 }, { 40, 9 }, { 37, 11 }, { 33, 14 },
+ { 52, 3 }, { 42, 8 }, { 36, 12 }, { 32, 15 },
+ { 39, 10 }, { 44, 7 }, { 58, 1 }, { 50, 4 },
+ { 55, 2 }, { 62, 0 }, { 46, 6 }, { 48, 5 },
+ { 35, 13 }, { 41, 9 }, { 38, 11 }, { 53, 3 },
+ { 34, 14 }, { 43, 8 }, { 59, 1 }, { 63, 0 },
+ { 56, 2 }, { 51, 4 }, { 37, 12 }, { 45, 7 },
+ { 40, 10 }, { 33, 15 }, { 47, 6 }, { 49, 5 },
+ { 36, 13 }, { 42, 9 }, { 39, 11 }, { 54, 3 },
+ { 44, 8 }, { 60, 1 }, { 35, 14 }, { 57, 2 },
+ { 52, 4 }, { 46, 7 }, { 41, 10 }, { 38, 12 },
+ { 50, 5 }, { 48, 6 }, { 34, 15 }, { 43, 9 },
+ { 37, 13 }, { 55, 3 }, { 40, 11 }, { 61, 1 },
+ { 45, 8 }, { 58, 2 }, { 53, 4 }, { 36, 14 },
+ { 47, 7 }, { 51, 5 }, { 42, 10 }, { 49, 6 },
+ { 39, 12 }, { 35, 15 }, { 56, 3 }, { 44, 9 },
+ { 38, 13 }, { 62, 1 }, { 41, 11 }, { 59, 2 },
+ { 46, 8 }, { 54, 4 }, { 48, 7 }, { 37, 14 },
+ { 52, 5 }, { 50, 6 }, { 43, 10 }, { 40, 12 },
+ { 36, 15 }, { 57, 3 }, { 45, 9 }, { 63, 1 },
+ { 39, 13 }, { 60, 2 }, { 42, 11 }, { 47, 8 },
+ { 55, 4 }, { 49, 7 }, { 53, 5 }, { 51, 6 },
+ { 38, 14 }, { 44, 10 }, { 41, 12 }, { 58, 3 },
+ { 37, 15 }, { 46, 9 }, { 61, 2 }, { 40, 13 },
+ { 56, 4 }, { 43, 11 }, { 48, 8 }, { 54, 5 },
+ { 50, 7 }, { 52, 6 }, { 39, 14 }, { 45, 10 },
+ { 59, 3 }, { 42, 12 }, { 62, 2 }, { 47, 9 },
+ { 38, 15 }, { 57, 4 }, { 41, 13 }, { 44, 11 },
+ { 49, 8 }, { 55, 5 }, { 51, 7 }, { 53, 6 },
+ { 60, 3 }, { 46, 10 }, { 40, 14 }, { 43, 12 },
+ { 63, 2 }, { 48, 9 }, { 58, 4 }, { 39, 15 },
+ { 50, 8 }, { 45, 11 }, { 42, 13 }, { 56, 5 },
+ { 52, 7 }, { 54, 6 }, { 61, 3 }, { 47, 10 },
+ { 41, 14 }, { 44, 12 }, { 49, 9 }, { 59, 4 },
+ { 40, 15 }, { 51, 8 }, { 57, 5 }, { 46, 11 },
+ { 43, 13 }, { 53, 7 }, { 55, 6 }, { 62, 3 },
+ { 48, 10 }, { 42, 14 }, { 45, 12 }, { 60, 4 },
+ { 50, 9 }, { 52, 8 }, { 58, 5 }, { 41, 15 },
+ { 47, 11 }, { 54, 7 }, { 56, 6 }, { 44, 13 },
+ { 63, 3 }, { 49, 10 }, { 43, 14 }, { 61, 4 },
+ { 46, 12 }, { 51, 9 }, { 59, 5 }, { 53, 8 },
+ { 42, 15 }, { 57, 6 }, { 55, 7 }, { 48, 11 },
+ { 45, 13 }, { 50, 10 }, { 62, 4 }, { 44, 14 },
+ { 47, 12 }, { 52, 9 }, { 60, 5 }, { 54, 8 },
+ { 58, 6 }, { 56, 7 }, { 49, 11 }, { 43, 15 },
+ { 46, 13 }, { 51, 10 }, { 63, 4 }, { 48, 12 },
+ { 45, 14 }, { 53, 9 }, { 61, 5 }, { 55, 8 },
+ { 59, 6 }, { 57, 7 }, { 50, 11 }, { 44, 15 },
+ { 47, 13 }, { 52, 10 }, { 54, 9 }, { 49, 12 },
+ { 62, 5 }, { 46, 14 }, { 56, 8 }, { 60, 6 },
+ { 58, 7 }, { 51, 11 }, { 45, 15 }, { 48, 13 },
+ { 53, 10 }, { 63, 5 }, { 55, 9 }, { 50, 12 },
+ { 47, 14 }, { 57, 8 }, { 61, 6 }, { 59, 7 },
+ { 52, 11 }, { 46, 15 }, { 49, 13 }, { 54, 10 },
+ { 56, 9 }, { 51, 12 }, { 62, 6 }, { 58, 8 },
+ { 48, 14 }, { 60, 7 }, { 53, 11 }, { 47, 15 },
+ { 50, 13 }, { 55, 10 }, { 57, 9 }, { 63, 6 },
+ { 52, 12 }, { 59, 8 }, { 61, 7 }, { 49, 14 },
+ { 54, 11 }, { 51, 13 }, { 48, 15 }, { 56, 10 },
+ { 58, 9 }, { 60, 8 }, { 53, 12 }, { 62, 7 },
+ { 50, 14 }, { 55, 11 }, { 52, 13 }, { 49, 15 },
+ { 57, 10 }, { 59, 9 }, { 61, 8 }, { 63, 7 },
+ { 54, 12 }, { 51, 14 }, { 56, 11 }, { 53, 13 },
+ { 58, 10 }, { 50, 15 }, { 60, 9 }, { 62, 8 },
+ { 55, 12 }, { 52, 14 }, { 57, 11 }, { 59, 10 },
+ { 54, 13 }, { 51, 15 }, { 61, 9 }, { 63, 8 },
+ { 56, 12 }, { 53, 14 }, { 58, 11 }, { 60, 10 },
+ { 55, 13 }, { 52, 15 }, { 62, 9 }, { 57, 12 },
+ { 54, 14 }, { 59, 11 }, { 61, 10 }, { 56, 13 },
+ { 63, 9 }, { 53, 15 }, { 58, 12 }, { 55, 14 },
+ { 60, 11 }, { 62, 10 }, { 57, 13 }, { 54, 15 },
+ { 59, 12 }, { 56, 14 }, { 61, 11 }, { 63, 10 },
+ { 58, 13 }, { 55, 15 }, { 60, 12 }, { 57, 14 },
+ { 62, 11 }, { 59, 13 }, { 56, 15 }, { 61, 12 },
+ { 63, 11 }, { 58, 14 }, { 60, 13 }, { 57, 15 },
+ { 62, 12 }, { 59, 14 }, { 61, 13 }, { 58, 15 },
+ { 63, 12 }, { 60, 14 }, { 62, 13 }, { 59, 15 },
+ { 61, 14 }, { 63, 13 }, { 60, 15 }, { 62, 14 },
+ { 61, 15 }, { 63, 14 }, { 62, 15 }, { 63, 15 },
+ { 0, 32 }, { 0, 33 }, { 0, 34 }, { 0, 35 },
+ { 1, 32 }, { 0, 36 }, { 1, 33 }, { 0, 37 },
+ { 1, 34 }, { 2, 32 }, { 0, 38 }, { 1, 35 },
+ { 2, 33 }, { 0, 39 }, { 1, 36 }, { 2, 34 },
+ { 0, 40 }, { 3, 32 }, { 1, 37 }, { 2, 35 },
+ { 0, 41 }, { 3, 33 }, { 1, 38 }, { 2, 36 },
+ { 0, 42 }, { 4, 32 }, { 3, 34 }, { 1, 39 },
+ { 2, 37 }, { 0, 43 }, { 3, 35 }, { 4, 33 },
+ { 1, 40 }, { 0, 44 }, { 2, 38 }, { 5, 32 },
+ { 3, 36 }, { 1, 41 }, { 4, 34 }, { 0, 45 },
+ { 2, 39 }, { 5, 33 }, { 1, 42 }, { 3, 37 },
+ { 4, 35 }, { 0, 46 }, { 2, 40 }, { 6, 32 },
+ { 1, 43 }, { 5, 34 }, { 3, 38 }, { 4, 36 },
+ { 0, 47 }, { 2, 41 }, { 6, 33 }, { 1, 44 },
+ { 3, 39 }, { 5, 35 }, { 4, 37 }, { 0, 48 },
+ { 7, 32 }, { 2, 42 }, { 6, 34 }, { 1, 45 },
+ { 3, 40 }, { 5, 36 }, { 4, 38 }, { 0, 49 },
+ { 7, 33 }, { 2, 43 }, { 1, 46 }, { 6, 35 },
+ { 3, 41 }, { 5, 37 }, { 4, 39 }, { 8, 32 },
+ { 0, 50 }, { 2, 44 }, { 7, 34 }, { 1, 47 },
+ { 6, 36 }, { 3, 42 }, { 5, 38 }, { 0, 51 },
+ { 4, 40 }, { 8, 33 }, { 2, 45 }, { 7, 35 },
+ { 1, 48 }, { 9, 32 }, { 6, 37 }, { 3, 43 },
+ { 0, 52 }, { 5, 39 }, { 4, 41 }, { 8, 34 },
+ { 2, 46 }, { 1, 49 }, { 7, 36 }, { 9, 33 },
+ { 3, 44 }, { 0, 53 }, { 6, 38 }, { 4, 42 },
+ { 5, 40 }, { 8, 35 }, { 10, 32 }, { 2, 47 },
+ { 1, 50 }, { 7, 37 }, { 0, 54 }, { 3, 45 },
+ { 9, 34 }, { 6, 39 }, { 4, 43 }, { 5, 41 },
+ { 2, 48 }, { 8, 36 }, { 1, 51 }, { 10, 33 },
+ { 0, 55 }, { 7, 38 }, { 3, 46 }, { 11, 32 },
+ { 6, 40 }, { 9, 35 }, { 4, 44 }, { 5, 42 },
+ { 2, 49 }, { 1, 52 }, { 8, 37 }, { 10, 34 },
+ { 0, 56 }, { 7, 39 }, { 3, 47 }, { 6, 41 },
+ { 11, 33 }, { 4, 45 }, { 9, 36 }, { 5, 43 },
+ { 2, 50 }, { 1, 53 }, { 12, 32 }, { 8, 38 },
+ { 0, 57 }, { 10, 35 }, { 3, 48 }, { 7, 40 },
+ { 6, 42 }, { 4, 46 }, { 5, 44 }, { 11, 34 },
+ { 9, 37 }, { 2, 51 }, { 1, 54 }, { 0, 58 },
+ { 8, 39 }, { 12, 33 }, { 10, 36 }, { 3, 49 },
+ { 7, 41 }, { 13, 32 }, { 4, 47 }, { 6, 43 },
+ { 5, 45 }, { 2, 52 }, { 1, 55 }, { 9, 38 },
+ { 11, 35 }, { 0, 59 }, { 8, 40 }, { 12, 34 },
+ { 3, 50 }, { 10, 37 }, { 7, 42 }, { 4, 48 },
+ { 6, 44 }, { 13, 33 }, { 1, 56 }, { 2, 53 },
+ { 5, 46 }, { 0, 60 }, { 9, 39 }, { 11, 36 },
+ { 14, 32 }, { 8, 41 }, { 3, 51 }, { 12, 35 },
+ { 10, 38 }, { 7, 43 }, { 4, 49 }, { 1, 57 },
+ { 2, 54 }, { 6, 45 }, { 5, 47 }, { 0, 61 },
+ { 13, 34 }, { 9, 40 }, { 11, 37 }, { 14, 33 },
+ { 3, 52 }, { 8, 42 }, { 12, 36 }, { 15, 32 },
+ { 10, 39 }, { 7, 44 }, { 1, 58 }, { 4, 50 },
+ { 2, 55 }, { 0, 62 }, { 6, 46 }, { 5, 48 },
+ { 13, 35 }, { 9, 41 }, { 11, 38 }, { 3, 53 },
+ { 14, 34 }, { 8, 43 }, { 1, 59 }, { 0, 63 },
+ { 2, 56 }, { 4, 51 }, { 12, 37 }, { 7, 45 },
+ { 10, 40 }, { 15, 33 }, { 6, 47 }, { 5, 49 },
+ { 13, 36 }, { 9, 42 }, { 11, 39 }, { 3, 54 },
+ { 8, 44 }, { 1, 60 }, { 14, 35 }, { 2, 57 },
+ { 4, 52 }, { 7, 46 }, { 10, 41 }, { 12, 38 },
+ { 5, 50 }, { 6, 48 }, { 15, 34 }, { 9, 43 },
+ { 13, 37 }, { 3, 55 }, { 11, 40 }, { 1, 61 },
+ { 8, 45 }, { 2, 58 }, { 4, 53 }, { 14, 36 },
+ { 7, 47 }, { 5, 51 }, { 10, 42 }, { 6, 49 },
+ { 12, 39 }, { 15, 35 }, { 3, 56 }, { 9, 44 },
+ { 13, 38 }, { 1, 62 }, { 11, 41 }, { 2, 59 },
+ { 8, 46 }, { 4, 54 }, { 7, 48 }, { 14, 37 },
+ { 5, 52 }, { 6, 50 }, { 10, 43 }, { 12, 40 },
+ { 15, 36 }, { 3, 57 }, { 9, 45 }, { 1, 63 },
+ { 13, 39 }, { 2, 60 }, { 11, 42 }, { 8, 47 },
+ { 4, 55 }, { 7, 49 }, { 5, 53 }, { 6, 51 },
+ { 14, 38 }, { 10, 44 }, { 12, 41 }, { 3, 58 },
+ { 15, 37 }, { 9, 46 }, { 2, 61 }, { 13, 40 },
+ { 4, 56 }, { 11, 43 }, { 8, 48 }, { 5, 54 },
+ { 7, 50 }, { 6, 52 }, { 14, 39 }, { 10, 45 },
+ { 3, 59 }, { 12, 42 }, { 2, 62 }, { 9, 47 },
+ { 15, 38 }, { 4, 57 }, { 13, 41 }, { 11, 44 },
+ { 8, 49 }, { 5, 55 }, { 7, 51 }, { 6, 53 },
+ { 3, 60 }, { 10, 46 }, { 14, 40 }, { 12, 43 },
+ { 2, 63 }, { 9, 48 }, { 4, 58 }, { 15, 39 },
+ { 8, 50 }, { 11, 45 }, { 13, 42 }, { 5, 56 },
+ { 7, 52 }, { 6, 54 }, { 3, 61 }, { 10, 47 },
+ { 14, 41 }, { 12, 44 }, { 9, 49 }, { 4, 59 },
+ { 15, 40 }, { 8, 51 }, { 5, 57 }, { 11, 46 },
+ { 13, 43 }, { 7, 53 }, { 6, 55 }, { 3, 62 },
+ { 10, 48 }, { 14, 42 }, { 12, 45 }, { 4, 60 },
+ { 9, 50 }, { 8, 52 }, { 5, 58 }, { 15, 41 },
+ { 11, 47 }, { 7, 54 }, { 6, 56 }, { 13, 44 },
+ { 3, 63 }, { 10, 49 }, { 14, 43 }, { 4, 61 },
+ { 12, 46 }, { 9, 51 }, { 5, 59 }, { 8, 53 },
+ { 15, 42 }, { 6, 57 }, { 7, 55 }, { 11, 48 },
+ { 13, 45 }, { 10, 50 }, { 4, 62 }, { 14, 44 },
+ { 12, 47 }, { 9, 52 }, { 5, 60 }, { 8, 54 },
+ { 6, 58 }, { 7, 56 }, { 11, 49 }, { 15, 43 },
+ { 13, 46 }, { 10, 51 }, { 4, 63 }, { 12, 48 },
+ { 14, 45 }, { 9, 53 }, { 5, 61 }, { 8, 55 },
+ { 6, 59 }, { 7, 57 }, { 11, 50 }, { 15, 44 },
+ { 13, 47 }, { 10, 52 }, { 9, 54 }, { 12, 49 },
+ { 5, 62 }, { 14, 46 }, { 8, 56 }, { 6, 60 },
+ { 7, 58 }, { 11, 51 }, { 15, 45 }, { 13, 48 },
+ { 10, 53 }, { 5, 63 }, { 9, 55 }, { 12, 50 },
+ { 14, 47 }, { 8, 57 }, { 6, 61 }, { 7, 59 },
+ { 11, 52 }, { 15, 46 }, { 13, 49 }, { 10, 54 },
+ { 9, 56 }, { 12, 51 }, { 6, 62 }, { 8, 58 },
+ { 14, 48 }, { 7, 60 }, { 11, 53 }, { 15, 47 },
+ { 12, 52 }, { 8, 59 }, { 7, 61 }, { 14, 49 },
+ { 13, 50 }, { 10, 55 }, { 9, 57 }, { 6, 63 },
+ { 11, 54 }, { 13, 51 }, { 15, 48 }, { 10, 56 },
+ { 9, 58 }, { 8, 60 }, { 12, 53 }, { 7, 62 },
+ { 14, 50 }, { 11, 55 }, { 13, 52 }, { 15, 49 },
+ { 10, 57 }, { 9, 59 }, { 8, 61 }, { 7, 63 },
+ { 12, 54 }, { 14, 51 }, { 11, 56 }, { 13, 53 },
+ { 10, 58 }, { 15, 50 }, { 9, 60 }, { 8, 62 },
+ { 12, 55 }, { 14, 52 }, { 11, 57 }, { 10, 59 },
+ { 13, 54 }, { 15, 51 }, { 9, 61 }, { 8, 63 },
+ { 12, 56 }, { 14, 53 }, { 11, 58 }, { 10, 60 },
+ { 13, 55 }, { 15, 52 }, { 9, 62 }, { 12, 57 },
+ { 14, 54 }, { 11, 59 }, { 10, 61 }, { 13, 56 },
+ { 9, 63 }, { 15, 53 }, { 12, 58 }, { 14, 55 },
+ { 11, 60 }, { 10, 62 }, { 13, 57 }, { 15, 54 },
+ { 12, 59 }, { 14, 56 }, { 11, 61 }, { 10, 63 },
+ { 13, 58 }, { 15, 55 }, { 12, 60 }, { 14, 57 },
+ { 11, 62 }, { 13, 59 }, { 15, 56 }, { 12, 61 },
+ { 11, 63 }, { 14, 58 }, { 13, 60 }, { 15, 57 },
+ { 12, 62 }, { 14, 59 }, { 13, 61 }, { 15, 58 },
+ { 12, 63 }, { 14, 60 }, { 13, 62 }, { 15, 59 },
+ { 14, 61 }, { 13, 63 }, { 15, 60 }, { 14, 62 },
+ { 15, 61 }, { 14, 63 }, { 15, 62 }, { 15, 63 },
+ { 32, 16 }, { 16, 32 }, { 33, 16 }, { 16, 33 },
+ { 32, 17 }, { 17, 32 }, { 34, 16 }, { 16, 34 },
+ { 33, 17 }, { 17, 33 }, { 32, 18 }, { 18, 32 },
+ { 35, 16 }, { 16, 35 }, { 34, 17 }, { 17, 34 },
+ { 33, 18 }, { 18, 33 }, { 32, 19 }, { 19, 32 },
+ { 36, 16 }, { 16, 36 }, { 35, 17 }, { 17, 35 },
+ { 34, 18 }, { 18, 34 }, { 33, 19 }, { 19, 33 },
+ { 32, 20 }, { 20, 32 }, { 37, 16 }, { 16, 37 },
+ { 36, 17 }, { 17, 36 }, { 35, 18 }, { 18, 35 },
+ { 34, 19 }, { 19, 34 }, { 33, 20 }, { 20, 33 },
+ { 38, 16 }, { 16, 38 }, { 32, 21 }, { 21, 32 },
+ { 37, 17 }, { 17, 37 }, { 36, 18 }, { 18, 36 },
+ { 35, 19 }, { 19, 35 }, { 39, 16 }, { 16, 39 },
+ { 34, 20 }, { 20, 34 }, { 33, 21 }, { 21, 33 },
+ { 38, 17 }, { 17, 38 }, { 32, 22 }, { 22, 32 },
+ { 37, 18 }, { 18, 37 }, { 36, 19 }, { 19, 36 },
+ { 40, 16 }, { 16, 40 }, { 35, 20 }, { 20, 35 },
+ { 34, 21 }, { 21, 34 }, { 39, 17 }, { 17, 39 },
+ { 33, 22 }, { 22, 33 }, { 32, 23 }, { 23, 32 },
+ { 38, 18 }, { 18, 38 }, { 37, 19 }, { 19, 37 },
+ { 41, 16 }, { 16, 41 }, { 36, 20 }, { 20, 36 },
+ { 35, 21 }, { 21, 35 }, { 40, 17 }, { 17, 40 },
+ { 34, 22 }, { 22, 34 }, { 33, 23 }, { 23, 33 },
+ { 39, 18 }, { 18, 39 }, { 32, 24 }, { 24, 32 },
+ { 38, 19 }, { 19, 38 }, { 42, 16 }, { 16, 42 },
+ { 37, 20 }, { 20, 37 }, { 41, 17 }, { 17, 41 },
+ { 36, 21 }, { 21, 36 }, { 35, 22 }, { 22, 35 },
+ { 40, 18 }, { 18, 40 }, { 34, 23 }, { 23, 34 },
+ { 33, 24 }, { 24, 33 }, { 32, 25 }, { 25, 32 },
+ { 39, 19 }, { 19, 39 }, { 43, 16 }, { 16, 43 },
+ { 38, 20 }, { 20, 38 }, { 42, 17 }, { 17, 42 },
+ { 37, 21 }, { 21, 37 }, { 36, 22 }, { 22, 36 },
+ { 41, 18 }, { 18, 41 }, { 35, 23 }, { 23, 35 },
+ { 34, 24 }, { 24, 34 }, { 40, 19 }, { 19, 40 },
+ { 44, 16 }, { 16, 44 }, { 33, 25 }, { 25, 33 },
+ { 32, 26 }, { 26, 32 }, { 39, 20 }, { 20, 39 },
+ { 43, 17 }, { 17, 43 }, { 38, 21 }, { 21, 38 },
+ { 37, 22 }, { 22, 37 }, { 42, 18 }, { 18, 42 },
+ { 36, 23 }, { 23, 36 }, { 45, 16 }, { 16, 45 },
+ { 41, 19 }, { 19, 41 }, { 35, 24 }, { 24, 35 },
+ { 34, 25 }, { 25, 34 }, { 33, 26 }, { 26, 33 },
+ { 40, 20 }, { 20, 40 }, { 32, 27 }, { 27, 32 },
+ { 44, 17 }, { 17, 44 }, { 39, 21 }, { 21, 39 },
+ { 43, 18 }, { 18, 43 }, { 38, 22 }, { 22, 38 },
+ { 37, 23 }, { 23, 37 }, { 46, 16 }, { 16, 46 },
+ { 42, 19 }, { 19, 42 }, { 36, 24 }, { 24, 36 },
+ { 35, 25 }, { 25, 35 }, { 41, 20 }, { 20, 41 },
+ { 34, 26 }, { 26, 34 }, { 45, 17 }, { 17, 45 },
+ { 33, 27 }, { 27, 33 }, { 32, 28 }, { 28, 32 },
+ { 40, 21 }, { 21, 40 }, { 44, 18 }, { 18, 44 },
+ { 39, 22 }, { 22, 39 }, { 38, 23 }, { 23, 38 },
+ { 47, 16 }, { 16, 47 }, { 43, 19 }, { 19, 43 },
+ { 37, 24 }, { 24, 37 }, { 36, 25 }, { 25, 36 },
+ { 42, 20 }, { 20, 42 }, { 46, 17 }, { 17, 46 },
+ { 35, 26 }, { 26, 35 }, { 34, 27 }, { 27, 34 },
+ { 33, 28 }, { 28, 33 }, { 41, 21 }, { 21, 41 },
+ { 32, 29 }, { 29, 32 }, { 45, 18 }, { 18, 45 },
+ { 40, 22 }, { 22, 40 }, { 48, 16 }, { 16, 48 },
+ { 39, 23 }, { 23, 39 }, { 44, 19 }, { 19, 44 },
+ { 38, 24 }, { 24, 38 }, { 43, 20 }, { 20, 43 },
+ { 47, 17 }, { 17, 47 }, { 37, 25 }, { 25, 37 },
+ { 36, 26 }, { 26, 36 }, { 35, 27 }, { 27, 35 },
+ { 42, 21 }, { 21, 42 }, { 34, 28 }, { 28, 34 },
+ { 46, 18 }, { 18, 46 }, { 33, 29 }, { 29, 33 },
+ { 32, 30 }, { 30, 32 }, { 41, 22 }, { 22, 41 },
+ { 49, 16 }, { 16, 49 }, { 45, 19 }, { 19, 45 },
+ { 40, 23 }, { 23, 40 }, { 39, 24 }, { 24, 39 },
+ { 48, 17 }, { 17, 48 }, { 44, 20 }, { 20, 44 },
+ { 38, 25 }, { 25, 38 }, { 37, 26 }, { 26, 37 },
+ { 43, 21 }, { 21, 43 }, { 36, 27 }, { 27, 36 },
+ { 47, 18 }, { 18, 47 }, { 35, 28 }, { 28, 35 },
+ { 34, 29 }, { 29, 34 }, { 42, 22 }, { 22, 42 },
+ { 33, 30 }, { 30, 33 }, { 50, 16 }, { 16, 50 },
+ { 32, 31 }, { 31, 32 }, { 46, 19 }, { 19, 46 },
+ { 41, 23 }, { 23, 41 }, { 40, 24 }, { 24, 40 },
+ { 49, 17 }, { 17, 49 }, { 45, 20 }, { 20, 45 },
+ { 39, 25 }, { 25, 39 }, { 38, 26 }, { 26, 38 },
+ { 44, 21 }, { 21, 44 }, { 48, 18 }, { 18, 48 },
+ { 37, 27 }, { 27, 37 }, { 36, 28 }, { 28, 36 },
+ { 51, 16 }, { 16, 51 }, { 43, 22 }, { 22, 43 },
+ { 35, 29 }, { 29, 35 }, { 34, 30 }, { 30, 34 },
+ { 33, 31 }, { 31, 33 }, { 32, 32 }, { 47, 19 },
+ { 19, 47 }, { 42, 23 }, { 23, 42 }, { 50, 17 },
+ { 17, 50 }, { 41, 24 }, { 24, 41 }, { 46, 20 },
+ { 20, 46 }, { 40, 25 }, { 25, 40 }, { 45, 21 },
+ { 21, 45 }, { 49, 18 }, { 18, 49 }, { 39, 26 },
+ { 26, 39 }, { 38, 27 }, { 27, 38 }, { 52, 16 },
+ { 16, 52 }, { 44, 22 }, { 22, 44 }, { 37, 28 },
+ { 28, 37 }, { 36, 29 }, { 29, 36 }, { 48, 19 },
+ { 19, 48 }, { 35, 30 }, { 30, 35 }, { 34, 31 },
+ { 31, 34 }, { 33, 32 }, { 32, 33 }, { 43, 23 },
+ { 23, 43 }, { 51, 17 }, { 17, 51 }, { 47, 20 },
+ { 20, 47 }, { 42, 24 }, { 24, 42 }, { 41, 25 },
+ { 25, 41 }, { 50, 18 }, { 18, 50 }, { 46, 21 },
+ { 21, 46 }, { 40, 26 }, { 26, 40 }, { 53, 16 },
+ { 16, 53 }, { 39, 27 }, { 27, 39 }, { 45, 22 },
+ { 22, 45 }, { 38, 28 }, { 28, 38 }, { 49, 19 },
+ { 19, 49 }, { 37, 29 }, { 29, 37 }, { 36, 30 },
+ { 30, 36 }, { 44, 23 }, { 23, 44 }, { 52, 17 },
+ { 17, 52 }, { 35, 31 }, { 31, 35 }, { 34, 32 },
+ { 32, 34 }, { 33, 33 }, { 48, 20 }, { 20, 48 },
+ { 43, 24 }, { 24, 43 }, { 42, 25 }, { 25, 42 },
+ { 51, 18 }, { 18, 51 }, { 47, 21 }, { 21, 47 },
+ { 54, 16 }, { 16, 54 }, { 41, 26 }, { 26, 41 },
+ { 40, 27 }, { 27, 40 }, { 46, 22 }, { 22, 46 },
+ { 50, 19 }, { 19, 50 }, { 39, 28 }, { 28, 39 },
+ { 38, 29 }, { 29, 38 }, { 53, 17 }, { 17, 53 },
+ { 45, 23 }, { 23, 45 }, { 37, 30 }, { 30, 37 },
+ { 36, 31 }, { 31, 36 }, { 49, 20 }, { 20, 49 },
+ { 35, 32 }, { 32, 35 }, { 34, 33 }, { 33, 34 },
+ { 44, 24 }, { 24, 44 }, { 52, 18 }, { 18, 52 },
+ { 43, 25 }, { 25, 43 }, { 48, 21 }, { 21, 48 },
+ { 55, 16 }, { 16, 55 }, { 42, 26 }, { 26, 42 },
+ { 47, 22 }, { 22, 47 }, { 51, 19 }, { 19, 51 },
+ { 41, 27 }, { 27, 41 }, { 40, 28 }, { 28, 40 },
+ { 54, 17 }, { 17, 54 }, { 46, 23 }, { 23, 46 },
+ { 39, 29 }, { 29, 39 }, { 50, 20 }, { 20, 50 },
+ { 38, 30 }, { 30, 38 }, { 37, 31 }, { 31, 37 },
+ { 45, 24 }, { 24, 45 }, { 36, 32 }, { 32, 36 },
+ { 35, 33 }, { 33, 35 }, { 34, 34 }, { 53, 18 },
+ { 18, 53 }, { 56, 16 }, { 16, 56 }, { 49, 21 },
+ { 21, 49 }, { 44, 25 }, { 25, 44 }, { 43, 26 },
+ { 26, 43 }, { 48, 22 }, { 22, 48 }, { 52, 19 },
+ { 19, 52 }, { 42, 27 }, { 27, 42 }, { 55, 17 },
+ { 17, 55 }, { 41, 28 }, { 28, 41 }, { 47, 23 },
+ { 23, 47 }, { 40, 29 }, { 29, 40 }, { 51, 20 },
+ { 20, 51 }, { 39, 30 }, { 30, 39 }, { 46, 24 },
+ { 24, 46 }, { 38, 31 }, { 31, 38 }, { 54, 18 },
+ { 18, 54 }, { 37, 32 }, { 32, 37 }, { 57, 16 },
+ { 16, 57 }, { 36, 33 }, { 33, 36 }, { 35, 34 },
+ { 34, 35 }, { 50, 21 }, { 21, 50 }, { 45, 25 },
+ { 25, 45 }, { 44, 26 }, { 26, 44 }, { 53, 19 },
+ { 19, 53 }, { 49, 22 }, { 22, 49 }, { 56, 17 },
+ { 17, 56 }, { 43, 27 }, { 27, 43 }, { 48, 23 },
+ { 23, 48 }, { 42, 28 }, { 28, 42 }, { 52, 20 },
+ { 20, 52 }, { 41, 29 }, { 29, 41 }, { 40, 30 },
+ { 30, 40 }, { 47, 24 }, { 24, 47 }, { 55, 18 },
+ { 18, 55 }, { 58, 16 }, { 16, 58 }, { 39, 31 },
+ { 31, 39 }, { 51, 21 }, { 21, 51 }, { 38, 32 },
+ { 32, 38 }, { 37, 33 }, { 33, 37 }, { 46, 25 },
+ { 25, 46 }, { 36, 34 }, { 34, 36 }, { 35, 35 },
+ { 54, 19 }, { 19, 54 }, { 45, 26 }, { 26, 45 },
+ { 50, 22 }, { 22, 50 }, { 57, 17 }, { 17, 57 },
+ { 44, 27 }, { 27, 44 }, { 49, 23 }, { 23, 49 },
+ { 53, 20 }, { 20, 53 }, { 43, 28 }, { 28, 43 },
+ { 42, 29 }, { 29, 42 }, { 56, 18 }, { 18, 56 },
+ { 59, 16 }, { 16, 59 }, { 48, 24 }, { 24, 48 },
+ { 41, 30 }, { 30, 41 }, { 52, 21 }, { 21, 52 },
+ { 40, 31 }, { 31, 40 }, { 39, 32 }, { 32, 39 },
+ { 47, 25 }, { 25, 47 }, { 38, 33 }, { 33, 38 },
+ { 37, 34 }, { 34, 37 }, { 36, 35 }, { 35, 36 },
+ { 55, 19 }, { 19, 55 }, { 51, 22 }, { 22, 51 },
+ { 46, 26 }, { 26, 46 }, { 58, 17 }, { 17, 58 },
+ { 45, 27 }, { 27, 45 }, { 50, 23 }, { 23, 50 },
+ { 54, 20 }, { 20, 54 }, { 44, 28 }, { 28, 44 },
+ { 57, 18 }, { 18, 57 }, { 60, 16 }, { 16, 60 },
+ { 43, 29 }, { 29, 43 }, { 49, 24 }, { 24, 49 },
+ { 53, 21 }, { 21, 53 }, { 42, 30 }, { 30, 42 },
+ { 41, 31 }, { 31, 41 }, { 48, 25 }, { 25, 48 },
+ { 40, 32 }, { 32, 40 }, { 56, 19 }, { 19, 56 },
+ { 39, 33 }, { 33, 39 }, { 59, 17 }, { 17, 59 },
+ { 52, 22 }, { 22, 52 }, { 38, 34 }, { 34, 38 },
+ { 37, 35 }, { 35, 37 }, { 36, 36 }, { 47, 26 },
+ { 26, 47 }, { 46, 27 }, { 27, 46 }, { 55, 20 },
+ { 20, 55 }, { 51, 23 }, { 23, 51 }, { 45, 28 },
+ { 28, 45 }, { 61, 16 }, { 16, 61 }, { 58, 18 },
+ { 18, 58 }, { 50, 24 }, { 24, 50 }, { 44, 29 },
+ { 29, 44 }, { 54, 21 }, { 21, 54 }, { 43, 30 },
+ { 30, 43 }, { 49, 25 }, { 25, 49 }, { 42, 31 },
+ { 31, 42 }, { 57, 19 }, { 19, 57 }, { 60, 17 },
+ { 17, 60 }, { 41, 32 }, { 32, 41 }, { 53, 22 },
+ { 22, 53 }, { 40, 33 }, { 33, 40 }, { 48, 26 },
+ { 26, 48 }, { 39, 34 }, { 34, 39 }, { 38, 35 },
+ { 35, 38 }, { 37, 36 }, { 36, 37 }, { 56, 20 },
+ { 20, 56 }, { 47, 27 }, { 27, 47 }, { 52, 23 },
+ { 23, 52 }, { 62, 16 }, { 16, 62 }, { 59, 18 },
+ { 18, 59 }, { 46, 28 }, { 28, 46 }, { 51, 24 },
+ { 24, 51 }, { 55, 21 }, { 21, 55 }, { 45, 29 },
+ { 29, 45 }, { 44, 30 }, { 30, 44 }, { 58, 19 },
+ { 19, 58 }, { 50, 25 }, { 25, 50 }, { 61, 17 },
+ { 17, 61 }, { 43, 31 }, { 31, 43 }, { 54, 22 },
+ { 22, 54 }, { 42, 32 }, { 32, 42 }, { 49, 26 },
+ { 26, 49 }, { 41, 33 }, { 33, 41 }, { 40, 34 },
+ { 34, 40 }, { 57, 20 }, { 20, 57 }, { 39, 35 },
+ { 35, 39 }, { 38, 36 }, { 36, 38 }, { 37, 37 },
+ { 53, 23 }, { 23, 53 }, { 48, 27 }, { 27, 48 },
+ { 63, 16 }, { 16, 63 }, { 60, 18 }, { 18, 60 },
+ { 47, 28 }, { 28, 47 }, { 52, 24 }, { 24, 52 },
+ { 56, 21 }, { 21, 56 }, { 46, 29 }, { 29, 46 },
+ { 59, 19 }, { 19, 59 }, { 62, 17 }, { 17, 62 },
+ { 45, 30 }, { 30, 45 }, { 51, 25 }, { 25, 51 },
+ { 55, 22 }, { 22, 55 }, { 44, 31 }, { 31, 44 },
+ { 43, 32 }, { 32, 43 }, { 50, 26 }, { 26, 50 },
+ { 42, 33 }, { 33, 42 }, { 58, 20 }, { 20, 58 },
+ { 41, 34 }, { 34, 41 }, { 54, 23 }, { 23, 54 },
+ { 61, 18 }, { 18, 61 }, { 40, 35 }, { 35, 40 },
+ { 49, 27 }, { 27, 49 }, { 39, 36 }, { 36, 39 },
+ { 38, 37 }, { 37, 38 }, { 48, 28 }, { 28, 48 },
+ { 57, 21 }, { 21, 57 }, { 53, 24 }, { 24, 53 },
+ { 47, 29 }, { 29, 47 }, { 60, 19 }, { 19, 60 },
+ { 63, 17 }, { 17, 63 }, { 52, 25 }, { 25, 52 },
+ { 46, 30 }, { 30, 46 }, { 56, 22 }, { 22, 56 },
+ { 45, 31 }, { 31, 45 }, { 51, 26 }, { 26, 51 },
+ { 44, 32 }, { 32, 44 }, { 59, 20 }, { 20, 59 },
+ { 62, 18 }, { 18, 62 }, { 43, 33 }, { 33, 43 },
+ { 55, 23 }, { 23, 55 }, { 42, 34 }, { 34, 42 },
+ { 50, 27 }, { 27, 50 }, { 41, 35 }, { 35, 41 },
+ { 40, 36 }, { 36, 40 }, { 39, 37 }, { 37, 39 },
+ { 38, 38 }, { 58, 21 }, { 21, 58 }, { 49, 28 },
+ { 28, 49 }, { 54, 24 }, { 24, 54 }, { 61, 19 },
+ { 19, 61 }, { 48, 29 }, { 29, 48 }, { 53, 25 },
+ { 25, 53 }, { 57, 22 }, { 22, 57 }, { 47, 30 },
+ { 30, 47 }, { 46, 31 }, { 31, 46 }, { 60, 20 },
+ { 20, 60 }, { 52, 26 }, { 26, 52 }, { 63, 18 },
+ { 18, 63 }, { 45, 32 }, { 32, 45 }, { 56, 23 },
+ { 23, 56 }, { 44, 33 }, { 33, 44 }, { 51, 27 },
+ { 27, 51 }, { 43, 34 }, { 34, 43 }, { 42, 35 },
+ { 35, 42 }, { 59, 21 }, { 21, 59 }, { 41, 36 },
+ { 36, 41 }, { 55, 24 }, { 24, 55 }, { 40, 37 },
+ { 37, 40 }, { 50, 28 }, { 28, 50 }, { 39, 38 },
+ { 38, 39 }, { 62, 19 }, { 19, 62 }, { 49, 29 },
+ { 29, 49 }, { 54, 25 }, { 25, 54 }, { 58, 22 },
+ { 22, 58 }, { 48, 30 }, { 30, 48 }, { 61, 20 },
+ { 20, 61 }, { 47, 31 }, { 31, 47 }, { 53, 26 },
+ { 26, 53 }, { 57, 23 }, { 23, 57 }, { 46, 32 },
+ { 32, 46 }, { 52, 27 }, { 27, 52 }, { 45, 33 },
+ { 33, 45 }, { 60, 21 }, { 21, 60 }, { 44, 34 },
+ { 34, 44 }, { 56, 24 }, { 24, 56 }, { 43, 35 },
+ { 35, 43 }, { 63, 19 }, { 19, 63 }, { 51, 28 },
+ { 28, 51 }, { 42, 36 }, { 36, 42 }, { 41, 37 },
+ { 37, 41 }, { 40, 38 }, { 38, 40 }, { 39, 39 },
+ { 50, 29 }, { 29, 50 }, { 55, 25 }, { 25, 55 },
+ { 59, 22 }, { 22, 59 }, { 49, 30 }, { 30, 49 },
+ { 62, 20 }, { 20, 62 }, { 54, 26 }, { 26, 54 },
+ { 48, 31 }, { 31, 48 }, { 58, 23 }, { 23, 58 },
+ { 47, 32 }, { 32, 47 }, { 53, 27 }, { 27, 53 },
+ { 46, 33 }, { 33, 46 }, { 61, 21 }, { 21, 61 },
+ { 57, 24 }, { 24, 57 }, { 45, 34 }, { 34, 45 },
+ { 52, 28 }, { 28, 52 }, { 44, 35 }, { 35, 44 },
+ { 43, 36 }, { 36, 43 }, { 42, 37 }, { 37, 42 },
+ { 41, 38 }, { 38, 41 }, { 40, 39 }, { 39, 40 },
+ { 60, 22 }, { 22, 60 }, { 51, 29 }, { 29, 51 },
+ { 56, 25 }, { 25, 56 }, { 63, 20 }, { 20, 63 },
+ { 50, 30 }, { 30, 50 }, { 55, 26 }, { 26, 55 },
+ { 59, 23 }, { 23, 59 }, { 49, 31 }, { 31, 49 },
+ { 48, 32 }, { 32, 48 }, { 54, 27 }, { 27, 54 },
+ { 62, 21 }, { 21, 62 }, { 47, 33 }, { 33, 47 },
+ { 58, 24 }, { 24, 58 }, { 46, 34 }, { 34, 46 },
+ { 53, 28 }, { 28, 53 }, { 45, 35 }, { 35, 45 },
+ { 44, 36 }, { 36, 44 }, { 61, 22 }, { 22, 61 },
+ { 57, 25 }, { 25, 57 }, { 52, 29 }, { 29, 52 },
+ { 43, 37 }, { 37, 43 }, { 42, 38 }, { 38, 42 },
+ { 41, 39 }, { 39, 41 }, { 40, 40 }, { 51, 30 },
+ { 30, 51 }, { 56, 26 }, { 26, 56 }, { 60, 23 },
+ { 23, 60 }, { 50, 31 }, { 31, 50 }, { 63, 21 },
+ { 21, 63 }, { 55, 27 }, { 27, 55 }, { 49, 32 },
+ { 32, 49 }, { 59, 24 }, { 24, 59 }, { 48, 33 },
+ { 33, 48 }, { 54, 28 }, { 28, 54 }, { 47, 34 },
+ { 34, 47 }, { 62, 22 }, { 22, 62 }, { 46, 35 },
+ { 35, 46 }, { 58, 25 }, { 25, 58 }, { 45, 36 },
+ { 36, 45 }, { 53, 29 }, { 29, 53 }, { 44, 37 },
+ { 37, 44 }, { 43, 38 }, { 38, 43 }, { 42, 39 },
+ { 39, 42 }, { 41, 40 }, { 40, 41 }, { 52, 30 },
+ { 30, 52 }, { 57, 26 }, { 26, 57 }, { 61, 23 },
+ { 23, 61 }, { 51, 31 }, { 31, 51 }, { 56, 27 },
+ { 27, 56 }, { 50, 32 }, { 32, 50 }, { 60, 24 },
+ { 24, 60 }, { 49, 33 }, { 33, 49 }, { 55, 28 },
+ { 28, 55 }, { 63, 22 }, { 22, 63 }, { 48, 34 },
+ { 34, 48 }, { 59, 25 }, { 25, 59 }, { 47, 35 },
+ { 35, 47 }, { 54, 29 }, { 29, 54 }, { 46, 36 },
+ { 36, 46 }, { 45, 37 }, { 37, 45 }, { 44, 38 },
+ { 38, 44 }, { 53, 30 }, { 30, 53 }, { 62, 23 },
+ { 23, 62 }, { 58, 26 }, { 26, 58 }, { 43, 39 },
+ { 39, 43 }, { 42, 40 }, { 40, 42 }, { 41, 41 },
+ { 52, 31 }, { 31, 52 }, { 57, 27 }, { 27, 57 },
+ { 61, 24 }, { 24, 61 }, { 51, 32 }, { 32, 51 },
+ { 50, 33 }, { 33, 50 }, { 56, 28 }, { 28, 56 },
+ { 49, 34 }, { 34, 49 }, { 60, 25 }, { 25, 60 },
+ { 48, 35 }, { 35, 48 }, { 55, 29 }, { 29, 55 },
+ { 47, 36 }, { 36, 47 }, { 63, 23 }, { 23, 63 },
+ { 46, 37 }, { 37, 46 }, { 59, 26 }, { 26, 59 },
+ { 54, 30 }, { 30, 54 }, { 45, 38 }, { 38, 45 },
+ { 44, 39 }, { 39, 44 }, { 43, 40 }, { 40, 43 },
+ { 42, 41 }, { 41, 42 }, { 53, 31 }, { 31, 53 },
+ { 58, 27 }, { 27, 58 }, { 62, 24 }, { 24, 62 },
+ { 52, 32 }, { 32, 52 }, { 57, 28 }, { 28, 57 },
+ { 51, 33 }, { 33, 51 }, { 61, 25 }, { 25, 61 },
+ { 50, 34 }, { 34, 50 }, { 56, 29 }, { 29, 56 },
+ { 49, 35 }, { 35, 49 }, { 48, 36 }, { 36, 48 },
+ { 60, 26 }, { 26, 60 }, { 55, 30 }, { 30, 55 },
+ { 47, 37 }, { 37, 47 }, { 46, 38 }, { 38, 46 },
+ { 45, 39 }, { 39, 45 }, { 44, 40 }, { 40, 44 },
+ { 54, 31 }, { 31, 54 }, { 43, 41 }, { 41, 43 },
+ { 42, 42 }, { 59, 27 }, { 27, 59 }, { 63, 24 },
+ { 24, 63 }, { 53, 32 }, { 32, 53 }, { 58, 28 },
+ { 28, 58 }, { 52, 33 }, { 33, 52 }, { 62, 25 },
+ { 25, 62 }, { 51, 34 }, { 34, 51 }, { 57, 29 },
+ { 29, 57 }, { 50, 35 }, { 35, 50 }, { 61, 26 },
+ { 26, 61 }, { 49, 36 }, { 36, 49 }, { 56, 30 },
+ { 30, 56 }, { 48, 37 }, { 37, 48 }, { 47, 38 },
+ { 38, 47 }, { 55, 31 }, { 31, 55 }, { 60, 27 },
+ { 27, 60 }, { 46, 39 }, { 39, 46 }, { 45, 40 },
+ { 40, 45 }, { 44, 41 }, { 41, 44 }, { 43, 42 },
+ { 42, 43 }, { 54, 32 }, { 32, 54 }, { 59, 28 },
+ { 28, 59 }, { 63, 25 }, { 25, 63 }, { 53, 33 },
+ { 33, 53 }, { 52, 34 }, { 34, 52 }, { 58, 29 },
+ { 29, 58 }, { 51, 35 }, { 35, 51 }, { 62, 26 },
+ { 26, 62 }, { 57, 30 }, { 30, 57 }, { 50, 36 },
+ { 36, 50 }, { 49, 37 }, { 37, 49 }, { 61, 27 },
+ { 27, 61 }, { 48, 38 }, { 38, 48 }, { 56, 31 },
+ { 31, 56 }, { 47, 39 }, { 39, 47 }, { 46, 40 },
+ { 40, 46 }, { 45, 41 }, { 41, 45 }, { 44, 42 },
+ { 42, 44 }, { 43, 43 }, { 55, 32 }, { 32, 55 },
+ { 60, 28 }, { 28, 60 }, { 54, 33 }, { 33, 54 },
+ { 59, 29 }, { 29, 59 }, { 53, 34 }, { 34, 53 },
+ { 63, 26 }, { 26, 63 }, { 52, 35 }, { 35, 52 },
+ { 58, 30 }, { 30, 58 }, { 51, 36 }, { 36, 51 },
+ { 50, 37 }, { 37, 50 }, { 62, 27 }, { 27, 62 },
+ { 57, 31 }, { 31, 57 }, { 49, 38 }, { 38, 49 },
+ { 48, 39 }, { 39, 48 }, { 47, 40 }, { 40, 47 },
+ { 56, 32 }, { 32, 56 }, { 46, 41 }, { 41, 46 },
+ { 61, 28 }, { 28, 61 }, { 45, 42 }, { 42, 45 },
+ { 44, 43 }, { 43, 44 }, { 55, 33 }, { 33, 55 },
+ { 60, 29 }, { 29, 60 }, { 54, 34 }, { 34, 54 },
+ { 53, 35 }, { 35, 53 }, { 59, 30 }, { 30, 59 },
+ { 52, 36 }, { 36, 52 }, { 63, 27 }, { 27, 63 },
+ { 51, 37 }, { 37, 51 }, { 58, 31 }, { 31, 58 },
+ { 50, 38 }, { 38, 50 }, { 49, 39 }, { 39, 49 },
+ { 57, 32 }, { 32, 57 }, { 62, 28 }, { 28, 62 },
+ { 48, 40 }, { 40, 48 }, { 47, 41 }, { 41, 47 },
+ { 46, 42 }, { 42, 46 }, { 45, 43 }, { 43, 45 },
+ { 44, 44 }, { 56, 33 }, { 33, 56 }, { 61, 29 },
+ { 29, 61 }, { 55, 34 }, { 34, 55 }, { 54, 35 },
+ { 35, 54 }, { 60, 30 }, { 30, 60 }, { 53, 36 },
+ { 36, 53 }, { 59, 31 }, { 31, 59 }, { 52, 37 },
+ { 37, 52 }, { 51, 38 }, { 38, 51 }, { 63, 28 },
+ { 28, 63 }, { 58, 32 }, { 32, 58 }, { 50, 39 },
+ { 39, 50 }, { 49, 40 }, { 40, 49 }, { 48, 41 },
+ { 41, 48 }, { 57, 33 }, { 33, 57 }, { 47, 42 },
+ { 42, 47 }, { 46, 43 }, { 43, 46 }, { 45, 44 },
+ { 44, 45 }, { 62, 29 }, { 29, 62 }, { 56, 34 },
+ { 34, 56 }, { 61, 30 }, { 30, 61 }, { 55, 35 },
+ { 35, 55 }, { 54, 36 }, { 36, 54 }, { 60, 31 },
+ { 31, 60 }, { 53, 37 }, { 37, 53 }, { 52, 38 },
+ { 38, 52 }, { 59, 32 }, { 32, 59 }, { 51, 39 },
+ { 39, 51 }, { 50, 40 }, { 40, 50 }, { 58, 33 },
+ { 33, 58 }, { 49, 41 }, { 41, 49 }, { 63, 29 },
+ { 29, 63 }, { 48, 42 }, { 42, 48 }, { 47, 43 },
+ { 43, 47 }, { 46, 44 }, { 44, 46 }, { 45, 45 },
+ { 57, 34 }, { 34, 57 }, { 62, 30 }, { 30, 62 },
+ { 56, 35 }, { 35, 56 }, { 55, 36 }, { 36, 55 },
+ { 61, 31 }, { 31, 61 }, { 54, 37 }, { 37, 54 },
+ { 60, 32 }, { 32, 60 }, { 53, 38 }, { 38, 53 },
+ { 52, 39 }, { 39, 52 }, { 51, 40 }, { 40, 51 },
+ { 59, 33 }, { 33, 59 }, { 50, 41 }, { 41, 50 },
+ { 49, 42 }, { 42, 49 }, { 48, 43 }, { 43, 48 },
+ { 58, 34 }, { 34, 58 }, { 47, 44 }, { 44, 47 },
+ { 46, 45 }, { 45, 46 }, { 63, 30 }, { 30, 63 },
+ { 57, 35 }, { 35, 57 }, { 62, 31 }, { 31, 62 },
+ { 56, 36 }, { 36, 56 }, { 55, 37 }, { 37, 55 },
+ { 61, 32 }, { 32, 61 }, { 54, 38 }, { 38, 54 },
+ { 53, 39 }, { 39, 53 }, { 60, 33 }, { 33, 60 },
+ { 52, 40 }, { 40, 52 }, { 51, 41 }, { 41, 51 },
+ { 50, 42 }, { 42, 50 }, { 59, 34 }, { 34, 59 },
+ { 49, 43 }, { 43, 49 }, { 48, 44 }, { 44, 48 },
+ { 47, 45 }, { 45, 47 }, { 46, 46 }, { 58, 35 },
+ { 35, 58 }, { 63, 31 }, { 31, 63 }, { 57, 36 },
+ { 36, 57 }, { 56, 37 }, { 37, 56 }, { 62, 32 },
+ { 32, 62 }, { 55, 38 }, { 38, 55 }, { 54, 39 },
+ { 39, 54 }, { 61, 33 }, { 33, 61 }, { 53, 40 },
+ { 40, 53 }, { 52, 41 }, { 41, 52 }, { 60, 34 },
+ { 34, 60 }, { 51, 42 }, { 42, 51 }, { 50, 43 },
+ { 43, 50 }, { 49, 44 }, { 44, 49 }, { 48, 45 },
+ { 45, 48 }, { 59, 35 }, { 35, 59 }, { 47, 46 },
+ { 46, 47 }, { 58, 36 }, { 36, 58 }, { 57, 37 },
+ { 37, 57 }, { 63, 32 }, { 32, 63 }, { 56, 38 },
+ { 38, 56 }, { 62, 33 }, { 33, 62 }, { 55, 39 },
+ { 39, 55 }, { 54, 40 }, { 40, 54 }, { 61, 34 },
+ { 34, 61 }, { 53, 41 }, { 41, 53 }, { 52, 42 },
+ { 42, 52 }, { 51, 43 }, { 43, 51 }, { 60, 35 },
+ { 35, 60 }, { 50, 44 }, { 44, 50 }, { 49, 45 },
+ { 45, 49 }, { 48, 46 }, { 46, 48 }, { 47, 47 },
+ { 59, 36 }, { 36, 59 }, { 58, 37 }, { 37, 58 },
+ { 57, 38 }, { 38, 57 }, { 63, 33 }, { 33, 63 },
+ { 56, 39 }, { 39, 56 }, { 55, 40 }, { 40, 55 },
+ { 62, 34 }, { 34, 62 }, { 54, 41 }, { 41, 54 },
+ { 53, 42 }, { 42, 53 }, { 61, 35 }, { 35, 61 },
+ { 52, 43 }, { 43, 52 }, { 51, 44 }, { 44, 51 },
+ { 50, 45 }, { 45, 50 }, { 49, 46 }, { 46, 49 },
+ { 48, 47 }, { 47, 48 }, { 60, 36 }, { 36, 60 },
+ { 59, 37 }, { 37, 59 }, { 58, 38 }, { 38, 58 },
+ { 57, 39 }, { 39, 57 }, { 56, 40 }, { 40, 56 },
+ { 63, 34 }, { 34, 63 }, { 55, 41 }, { 41, 55 },
+ { 54, 42 }, { 42, 54 }, { 62, 35 }, { 35, 62 },
+ { 53, 43 }, { 43, 53 }, { 52, 44 }, { 44, 52 },
+ { 51, 45 }, { 45, 51 }, { 61, 36 }, { 36, 61 },
+ { 50, 46 }, { 46, 50 }, { 49, 47 }, { 47, 49 },
+ { 48, 48 }, { 60, 37 }, { 37, 60 }, { 59, 38 },
+ { 38, 59 }, { 58, 39 }, { 39, 58 }, { 57, 40 },
+ { 40, 57 }, { 56, 41 }, { 41, 56 }, { 63, 35 },
+ { 35, 63 }, { 55, 42 }, { 42, 55 }, { 54, 43 },
+ { 43, 54 }, { 53, 44 }, { 44, 53 }, { 62, 36 },
+ { 36, 62 }, { 52, 45 }, { 45, 52 }, { 51, 46 },
+ { 46, 51 }, { 50, 47 }, { 47, 50 }, { 49, 48 },
+ { 48, 49 }, { 61, 37 }, { 37, 61 }, { 60, 38 },
+ { 38, 60 }, { 59, 39 }, { 39, 59 }, { 58, 40 },
+ { 40, 58 }, { 57, 41 }, { 41, 57 }, { 56, 42 },
+ { 42, 56 }, { 55, 43 }, { 43, 55 }, { 63, 36 },
+ { 36, 63 }, { 54, 44 }, { 44, 54 }, { 53, 45 },
+ { 45, 53 }, { 52, 46 }, { 46, 52 }, { 62, 37 },
+ { 37, 62 }, { 51, 47 }, { 47, 51 }, { 50, 48 },
+ { 48, 50 }, { 49, 49 }, { 61, 38 }, { 38, 61 },
+ { 60, 39 }, { 39, 60 }, { 59, 40 }, { 40, 59 },
+ { 58, 41 }, { 41, 58 }, { 57, 42 }, { 42, 57 },
+ { 56, 43 }, { 43, 56 }, { 55, 44 }, { 44, 55 },
+ { 54, 45 }, { 45, 54 }, { 63, 37 }, { 37, 63 },
+ { 53, 46 }, { 46, 53 }, { 52, 47 }, { 47, 52 },
+ { 51, 48 }, { 48, 51 }, { 50, 49 }, { 49, 50 },
+ { 62, 38 }, { 38, 62 }, { 61, 39 }, { 39, 61 },
+ { 60, 40 }, { 40, 60 }, { 59, 41 }, { 41, 59 },
+ { 58, 42 }, { 42, 58 }, { 57, 43 }, { 43, 57 },
+ { 56, 44 }, { 44, 56 }, { 55, 45 }, { 45, 55 },
+ { 54, 46 }, { 46, 54 }, { 53, 47 }, { 47, 53 },
+ { 52, 48 }, { 48, 52 }, { 63, 38 }, { 38, 63 },
+ { 51, 49 }, { 49, 51 }, { 50, 50 }, { 62, 39 },
+ { 39, 62 }, { 61, 40 }, { 40, 61 }, { 60, 41 },
+ { 41, 60 }, { 59, 42 }, { 42, 59 }, { 58, 43 },
+ { 43, 58 }, { 57, 44 }, { 44, 57 }, { 56, 45 },
+ { 45, 56 }, { 55, 46 }, { 46, 55 }, { 54, 47 },
+ { 47, 54 }, { 53, 48 }, { 48, 53 }, { 52, 49 },
+ { 49, 52 }, { 51, 50 }, { 50, 51 }, { 63, 39 },
+ { 39, 63 }, { 62, 40 }, { 40, 62 }, { 61, 41 },
+ { 41, 61 }, { 60, 42 }, { 42, 60 }, { 59, 43 },
+ { 43, 59 }, { 58, 44 }, { 44, 58 }, { 57, 45 },
+ { 45, 57 }, { 56, 46 }, { 46, 56 }, { 55, 47 },
+ { 47, 55 }, { 54, 48 }, { 48, 54 }, { 53, 49 },
+ { 49, 53 }, { 52, 50 }, { 50, 52 }, { 51, 51 },
+ { 63, 40 }, { 40, 63 }, { 62, 41 }, { 41, 62 },
+ { 61, 42 }, { 42, 61 }, { 60, 43 }, { 43, 60 },
+ { 59, 44 }, { 44, 59 }, { 58, 45 }, { 45, 58 },
+ { 57, 46 }, { 46, 57 }, { 56, 47 }, { 47, 56 },
+ { 55, 48 }, { 48, 55 }, { 54, 49 }, { 49, 54 },
+ { 53, 50 }, { 50, 53 }, { 52, 51 }, { 51, 52 },
+ { 63, 41 }, { 41, 63 }, { 62, 42 }, { 42, 62 },
+ { 61, 43 }, { 43, 61 }, { 60, 44 }, { 44, 60 },
+ { 59, 45 }, { 45, 59 }, { 58, 46 }, { 46, 58 },
+ { 57, 47 }, { 47, 57 }, { 56, 48 }, { 48, 56 },
+ { 55, 49 }, { 49, 55 }, { 54, 50 }, { 50, 54 },
+ { 53, 51 }, { 51, 53 }, { 52, 52 }, { 63, 42 },
+ { 42, 63 }, { 62, 43 }, { 43, 62 }, { 61, 44 },
+ { 44, 61 }, { 60, 45 }, { 45, 60 }, { 59, 46 },
+ { 46, 59 }, { 58, 47 }, { 47, 58 }, { 57, 48 },
+ { 48, 57 }, { 56, 49 }, { 49, 56 }, { 55, 50 },
+ { 50, 55 }, { 54, 51 }, { 51, 54 }, { 53, 52 },
+ { 52, 53 }, { 63, 43 }, { 43, 63 }, { 62, 44 },
+ { 44, 62 }, { 61, 45 }, { 45, 61 }, { 60, 46 },
+ { 46, 60 }, { 59, 47 }, { 47, 59 }, { 58, 48 },
+ { 48, 58 }, { 57, 49 }, { 49, 57 }, { 56, 50 },
+ { 50, 56 }, { 55, 51 }, { 51, 55 }, { 54, 52 },
+ { 52, 54 }, { 53, 53 }, { 63, 44 }, { 44, 63 },
+ { 62, 45 }, { 45, 62 }, { 61, 46 }, { 46, 61 },
+ { 60, 47 }, { 47, 60 }, { 59, 48 }, { 48, 59 },
+ { 58, 49 }, { 49, 58 }, { 57, 50 }, { 50, 57 },
+ { 56, 51 }, { 51, 56 }, { 55, 52 }, { 52, 55 },
+ { 54, 53 }, { 53, 54 }, { 63, 45 }, { 45, 63 },
+ { 62, 46 }, { 46, 62 }, { 61, 47 }, { 47, 61 },
+ { 60, 48 }, { 48, 60 }, { 59, 49 }, { 49, 59 },
+ { 58, 50 }, { 50, 58 }, { 57, 51 }, { 51, 57 },
+ { 56, 52 }, { 52, 56 }, { 55, 53 }, { 53, 55 },
+ { 54, 54 }, { 63, 46 }, { 46, 63 }, { 62, 47 },
+ { 47, 62 }, { 61, 48 }, { 48, 61 }, { 60, 49 },
+ { 49, 60 }, { 59, 50 }, { 50, 59 }, { 58, 51 },
+ { 51, 58 }, { 57, 52 }, { 52, 57 }, { 56, 53 },
+ { 53, 56 }, { 55, 54 }, { 54, 55 }, { 63, 47 },
+ { 47, 63 }, { 62, 48 }, { 48, 62 }, { 61, 49 },
+ { 49, 61 }, { 60, 50 }, { 50, 60 }, { 59, 51 },
+ { 51, 59 }, { 58, 52 }, { 52, 58 }, { 57, 53 },
+ { 53, 57 }, { 56, 54 }, { 54, 56 }, { 55, 55 },
+ { 63, 48 }, { 48, 63 }, { 62, 49 }, { 49, 62 },
+ { 61, 50 }, { 50, 61 }, { 60, 51 }, { 51, 60 },
+ { 59, 52 }, { 52, 59 }, { 58, 53 }, { 53, 58 },
+ { 57, 54 }, { 54, 57 }, { 56, 55 }, { 55, 56 },
+ { 63, 49 }, { 49, 63 }, { 62, 50 }, { 50, 62 },
+ { 61, 51 }, { 51, 61 }, { 60, 52 }, { 52, 60 },
+ { 59, 53 }, { 53, 59 }, { 58, 54 }, { 54, 58 },
+ { 57, 55 }, { 55, 57 }, { 56, 56 }, { 63, 50 },
+ { 50, 63 }, { 62, 51 }, { 51, 62 }, { 61, 52 },
+ { 52, 61 }, { 60, 53 }, { 53, 60 }, { 59, 54 },
+ { 54, 59 }, { 58, 55 }, { 55, 58 }, { 57, 56 },
+ { 56, 57 }, { 63, 51 }, { 51, 63 }, { 62, 52 },
+ { 52, 62 }, { 61, 53 }, { 53, 61 }, { 60, 54 },
+ { 54, 60 }, { 59, 55 }, { 55, 59 }, { 58, 56 },
+ { 56, 58 }, { 57, 57 }, { 63, 52 }, { 52, 63 },
+ { 62, 53 }, { 53, 62 }, { 61, 54 }, { 54, 61 },
+ { 60, 55 }, { 55, 60 }, { 59, 56 }, { 56, 59 },
+ { 58, 57 }, { 57, 58 }, { 63, 53 }, { 53, 63 },
+ { 62, 54 }, { 54, 62 }, { 61, 55 }, { 55, 61 },
+ { 60, 56 }, { 56, 60 }, { 59, 57 }, { 57, 59 },
+ { 58, 58 }, { 63, 54 }, { 54, 63 }, { 62, 55 },
+ { 55, 62 }, { 61, 56 }, { 56, 61 }, { 60, 57 },
+ { 57, 60 }, { 59, 58 }, { 58, 59 }, { 63, 55 },
+ { 55, 63 }, { 62, 56 }, { 56, 62 }, { 61, 57 },
+ { 57, 61 }, { 60, 58 }, { 58, 60 }, { 59, 59 },
+ { 63, 56 }, { 56, 63 }, { 62, 57 }, { 57, 62 },
+ { 61, 58 }, { 58, 61 }, { 60, 59 }, { 59, 60 },
+ { 63, 57 }, { 57, 63 }, { 62, 58 }, { 58, 62 },
+ { 61, 59 }, { 59, 61 }, { 60, 60 }, { 63, 58 },
+ { 58, 63 }, { 62, 59 }, { 59, 62 }, { 61, 60 },
+ { 60, 61 }, { 63, 59 }, { 59, 63 }, { 62, 60 },
+ { 60, 62 }, { 61, 61 }, { 63, 60 }, { 60, 63 },
+ { 62, 61 }, { 61, 62 }, { 63, 61 }, { 61, 63 },
+ { 62, 62 }, { 63, 62 }, { 62, 63 }, { 63, 63 }
+};
+
+static const int daala_layout_offset4x4[] = { 0 };
+static const int daala_layout_offset8x8[] = { 0, 8, 16 };
+static const int daala_layout_offset16x16[] = { 0, 32, 64 };
+static const int daala_layout_offset32x32[] = { 0, 128 };
+static const int daala_layout_offset64x64[] = { 0 };
+
+static const int daala_band_offset4x4[] = { 1, 16 };
+static const int daala_band_offset8x8[] = { 1, 16, 24, 32, 64 };
+static const int daala_band_offset16x16[] = { 1, 16, 24, 32, 64, 96, 128, 256 };
+static const int daala_band_offset32x32[] = { 1, 16, 24, 32, 64, 96, 128, 256, 384, 512 };
+static const int daala_band_offset64x64[] = { 1, 16, 24, 32, 64, 96, 128, 256, 384, 512 };
+
+const DaalaBandLayout ff_daala_layouts[] = {
+ {
+ daala_zigzag4x4,
+ FF_ARRAY_ELEMS(daala_zigzag4x4),
+ daala_layout_offset4x4,
+ FF_ARRAY_ELEMS(daala_layout_offset4x4),
+ daala_band_offset4x4,
+ FF_ARRAY_ELEMS(daala_band_offset4x4) - 1
+ },
+ {
+ daala_zigzag8x8,
+ FF_ARRAY_ELEMS(daala_zigzag8x8),
+ daala_layout_offset8x8,
+ FF_ARRAY_ELEMS(daala_layout_offset8x8),
+ daala_band_offset8x8,
+ FF_ARRAY_ELEMS(daala_band_offset8x8) - 1
+ },
+ {
+ daala_zigzag16x16,
+ FF_ARRAY_ELEMS(daala_zigzag16x16),
+ daala_layout_offset16x16,
+ FF_ARRAY_ELEMS(daala_layout_offset16x16),
+ daala_band_offset16x16,
+ FF_ARRAY_ELEMS(daala_band_offset16x16) - 1
+ },
+ {
+ daala_zigzag32x32,
+ FF_ARRAY_ELEMS(daala_zigzag32x32),
+ daala_layout_offset32x32,
+ FF_ARRAY_ELEMS(daala_layout_offset32x32),
+ daala_band_offset32x32,
+ FF_ARRAY_ELEMS(daala_band_offset32x32) - 1
+ },
+ {
+ daala_zigzag64x64,
+ FF_ARRAY_ELEMS(daala_zigzag64x64),
+ daala_layout_offset64x64,
+ FF_ARRAY_ELEMS(daala_layout_offset64x64),
+ daala_band_offset64x64,
+ FF_ARRAY_ELEMS(daala_band_offset64x64) - 1
+ }
+};
+
+static const double daala_basis_qm_mag_4x4_l[] = {
+ 0.870774f, 0.872037f, 0.949493f, 0.947936f
+};
+
+static const double daala_basis_qm_mag_8x8_l[] = {
+ 0.936496f, 0.892830f, 0.938452f, 0.970087f,
+ 0.974272f, 0.967954f, 0.974035f, 0.990480f
+};
+static const double daala_basis_qm_mag_16x16_l[] = {
+ 0.968807f, 0.940969f, 0.947977f, 0.957741f,
+ 0.969762f, 0.978644f, 0.984885f, 0.988009f,
+ 0.987424f, 0.985569f, 0.984215f, 0.984462f,
+ 0.987205f, 0.991415f, 0.994985f, 0.998237f
+};
+static const double daala_basis_qm_mag_32x32_l[] = {
+ 0.985068f, 0.970006f, 0.969893f, 0.973192f,
+ 0.973444f, 0.975881f, 0.979601f, 0.981070f,
+ 0.984989f, 0.987520f, 0.988830f, 0.990983f,
+ 0.992376f, 0.992884f, 0.993447f, 0.993381f,
+ 0.993712f, 0.994060f, 0.993294f, 0.992392f,
+ 0.991338f, 0.992410f, 0.992051f, 0.993874f,
+ 0.993488f, 0.994162f, 0.995318f, 0.995925f,
+ 0.997475f, 0.999027f, 0.998303f, 1.001413f
+};
+static const double daala_basis_qm_mag_64x64_l[] = {
+ 0.992453f, 0.984930f, 0.985137f, 0.985029f,
+ 0.985514f, 0.985784f, 0.986269f, 0.986854f,
+ 0.989932f, 0.987780f, 0.988269f, 0.989175f,
+ 0.989951f, 0.990466f, 0.991145f, 0.991839f,
+ 0.990773f, 0.993191f, 0.993618f, 0.994221f,
+ 0.994662f, 0.995259f, 0.995826f, 0.995996f,
+ 0.999070f, 0.996624f, 0.996835f, 0.996948f,
+ 0.997022f, 0.996973f, 0.996993f, 0.996996f,
+ 0.996871f, 0.996828f, 0.996598f, 0.996688f,
+ 0.996845f, 0.996407f, 0.996327f, 0.996435f,
+ 0.999173f, 0.996216f, 0.995981f, 0.996173f,
+ 0.996595f, 0.996334f, 0.996512f, 0.996627f,
+ 0.994976f, 0.997113f, 0.997248f, 0.997548f,
+ 0.997943f, 0.998121f, 0.998291f, 0.998687f,
+ 1.001696f, 0.999133f, 0.999315f, 0.999621f,
+ 0.999745f, 0.999905f, 0.999936f, 1.000075f
+};
+
+static const double daala_basis_qm_mag_4x4_c420[] = {
+ 0.870774f, 0.872037f, 0.949493f, 0.947936f
+};
+static const double daala_basis_qm_mag_8x8_c420[] = {
+ 0.936496f, 0.892830f, 0.938452f, 0.970087f,
+ 0.974272f, 0.967954f, 0.974035f, 0.990480f
+};
+static const double daala_basis_qm_mag_16x16_c420[] = {
+ 0.968807f, 0.940969f, 0.947977f, 0.957741f,
+ 0.969762f, 0.978644f, 0.984885f, 0.988009f,
+ 0.987424f, 0.985569f, 0.984215f, 0.984462f,
+ 0.987205f, 0.991415f, 0.994985f, 0.998237f
+};
+static const double daala_basis_qm_mag_32x32_c420[] = {
+ 0.985068f, 0.970006f, 0.969893f, 0.973192f,
+ 0.973444f, 0.975881f, 0.979601f, 0.981070f,
+ 0.984989f, 0.987520f, 0.988830f, 0.990983f,
+ 0.992376f, 0.992884f, 0.993447f, 0.993381f,
+ 0.993712f, 0.994060f, 0.993294f, 0.992392f,
+ 0.991338f, 0.992410f, 0.992051f, 0.993874f,
+ 0.993488f, 0.994162f, 0.995318f, 0.995925f,
+ 0.997475f, 0.999027f, 0.998303f, 1.001413f
+};
+static const double daala_basis_qm_mag_64x64_c420[] = {
+ 0.992453f, 0.984930f, 0.985137f, 0.985029f,
+ 0.985514f, 0.985784f, 0.986269f, 0.986854f,
+ 0.989932f, 0.987780f, 0.988269f, 0.989175f,
+ 0.989951f, 0.990466f, 0.991145f, 0.991839f,
+ 0.990773f, 0.993191f, 0.993618f, 0.994221f,
+ 0.994662f, 0.995259f, 0.995826f, 0.995996f,
+ 0.999070f, 0.996624f, 0.996835f, 0.996948f,
+ 0.997022f, 0.996973f, 0.996993f, 0.996996f,
+ 0.996871f, 0.996828f, 0.996598f, 0.996688f,
+ 0.996845f, 0.996407f, 0.996327f, 0.996435f,
+ 0.999173f, 0.996216f, 0.995981f, 0.996173f,
+ 0.996595f, 0.996334f, 0.996512f, 0.996627f,
+ 0.994976f, 0.997113f, 0.997248f, 0.997548f,
+ 0.997943f, 0.998121f, 0.998291f, 0.998687f,
+ 1.001696f, 0.999133f, 0.999315f, 0.999621f,
+ 0.999745f, 0.999905f, 0.999936f, 1.000075f
+};
+
+const double *ff_daala_basis_qm_mag[2][DAALA_NBSIZES + 1] = {
+ {
+ daala_basis_qm_mag_4x4_l,
+ daala_basis_qm_mag_8x8_l,
+ daala_basis_qm_mag_16x16_l,
+ daala_basis_qm_mag_32x32_l,
+ daala_basis_qm_mag_64x64_l
+ },
+ {
+ daala_basis_qm_mag_4x4_c420,
+ daala_basis_qm_mag_8x8_c420,
+ daala_basis_qm_mag_16x16_c420,
+ daala_basis_qm_mag_32x32_c420,
+ daala_basis_qm_mag_64x64_c420
+ }
+};
+
+static const double daala_pvq_beta_4x4_l[] = {1.0f};
+static const double daala_pvq_beta_8x8_l[] = {1.0f, 1.0f, 1.0f, 1.0f};
+static const double daala_pvq_beta_16x16_l[] = {1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f};
+static const double daala_pvq_beta_32x32_l[] = {1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f};
+static const double daala_pvq_beta_64x64_l[] = {1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f};
+
+static const double daala_pvq_beta_4x4_am_l[] = {1.0f};
+static const double daala_pvq_beta_8x8_am_l[] = {1.5f, 1.5f, 1.5f, 1.5f};
+static const double daala_pvq_beta_16x16_am_l[] = {1.5f, 1.5f, 1.5f, 1.5f, 1.5f, 1.5f, 1.5f};
+static const double daala_pvq_beta_32x32_am_l[] = {1.5f, 1.5f, 1.5f, 1.5f, 1.5f, 1.5f, 1.5f, 1.5f, 1.5f, 1.5f};
+static const double daala_pvq_beta_64x64_am_l[] = {1.5f, 1.5f, 1.5f, 1.5f, 1.5f, 1.5f, 1.5f, 1.5f, 1.5f, 1.5f, 1.5f, 1.5f, 1.5f};
+
+static const double daala_pvq_beta_4x4_c[] = {1.0f};
+static const double daala_pvq_beta_8x8_c[] = {1.0f, 1.0f, 1.0f, 1.0f};
+static const double daala_pvq_beta_16x16_c[] = {1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f};
+static const double daala_pvq_beta_32x32_c[] = {1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f};
+static const double daala_pvq_beta_64x64_c[] = {1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f};
+
+const double *const ff_daalapvq_beta[2][DAALA_MAX_PLANES][DAALA_NBSIZES + 1] = {
+ {
+ {
+ daala_pvq_beta_4x4_l,
+ daala_pvq_beta_8x8_l,
+ daala_pvq_beta_16x16_l,
+ daala_pvq_beta_32x32_l,
+ daala_pvq_beta_64x64_l
+ },
+ {
+ daala_pvq_beta_4x4_c,
+ daala_pvq_beta_8x8_c,
+ daala_pvq_beta_16x16_c,
+ daala_pvq_beta_32x32_c,
+ daala_pvq_beta_64x64_c
+ },
+ {
+ daala_pvq_beta_4x4_c,
+ daala_pvq_beta_8x8_c,
+ daala_pvq_beta_16x16_c,
+ daala_pvq_beta_32x32_c,
+ daala_pvq_beta_64x64_c
+ },
+ {
+ daala_pvq_beta_4x4_c,
+ daala_pvq_beta_8x8_c,
+ daala_pvq_beta_16x16_c,
+ daala_pvq_beta_32x32_c,
+ daala_pvq_beta_64x64_c
+ }
+ },
+ {
+ {
+ daala_pvq_beta_4x4_am_l,
+ daala_pvq_beta_8x8_am_l,
+ daala_pvq_beta_16x16_am_l,
+ daala_pvq_beta_32x32_am_l,
+ daala_pvq_beta_64x64_am_l
+ },
+ {
+ daala_pvq_beta_4x4_c,
+ daala_pvq_beta_8x8_c,
+ daala_pvq_beta_16x16_c,
+ daala_pvq_beta_32x32_c,
+ daala_pvq_beta_64x64_c
+ },
+ {
+ daala_pvq_beta_4x4_c,
+ daala_pvq_beta_8x8_c,
+ daala_pvq_beta_16x16_c,
+ daala_pvq_beta_32x32_c,
+ daala_pvq_beta_64x64_c
+ },
+ {
+ daala_pvq_beta_4x4_c,
+ daala_pvq_beta_8x8_c,
+ daala_pvq_beta_16x16_c,
+ daala_pvq_beta_32x32_c,
+ daala_pvq_beta_64x64_c
+ }
+ }
+};
diff --git a/libavcodec/daalatab.h b/libavcodec/daalatab.h
new file mode 100644
index 0000000..e8e8264
--- /dev/null
+++ b/libavcodec/daalatab.h
@@ -0,0 +1,108 @@
+/*
+ * Daala video decoder
+ *
+ * Copyright (C) 2015-2016 Rostislav Pehlivanov <atomnuker at gmail.com>
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * This file incorporates work covered by the following copyright and
+ * permission notice:
+ *
+ * Copyright 2001-2015 Xiph.Org and contributors.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+ * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef AVCODEC_DAALATAB_H
+#define AVCODEC_DAALATAB_H
+
+#include "daala.h"
+
+typedef uint8_t daala_u8_doublet[2];
+
+typedef struct DaalaBandLayout {
+ const daala_u8_doublet *const tab;
+ const int tab_size;
+ const int *layout_offset;
+ const int layout_offset_size;
+ const int *band_offset;
+ const int band_offset_size;
+} DaalaBandLayout;
+
+struct DaalaPixFmts {
+ enum AVPixelFormat fmt;
+ int planes, depth, depth_mode;
+ int dec[DAALA_MAX_PLANES][2];
+};
+
+extern const struct DaalaPixFmts ff_daala_valid_formats[];
+extern const int ff_daala_valid_formats_num;
+
+/* Haar "quantization matrix" for each decomposition level */
+extern const uint8_t ff_daala_haar_qm[][DAALA_LOG_BSIZE_MAX];
+
+/* Per-plane keyframe bilinear blur filter strength */
+extern const uint8_t ff_daala_bilinear_blur[];
+
+/* Haar basis scaling compensation, [0] - x,y; [1] - diag */
+extern const uint8_t ff_daala_dc_comp[][2];
+
+/* Quantization matrices, currently only flat (0) and HVS (1) */
+extern const uint8_t *const ff_daala_qmatrices[];
+extern const int ff_daala_qmatrices_num;
+
+/* Chroma from luma scaling */
+extern const uint8_t ff_daaladsp_cfl_scale[4][4];
+
+/* Maps quantizers */
+extern const int ff_daala_quant_codemap[];
+extern const int ff_daala_quant_codemap_size;
+
+extern const ent_rng ff_daalaent_cdf_tab[];
+
+extern const ent_rng ff_daalaent_cdf_exp_tab[][16];
+extern const ent_rng ff_daalaent_laplace_offset[];
+
+extern const DaalaBandLayout ff_daala_layouts[];
+
+extern const double *ff_daala_basis_qm_mag[2][DAALA_NBSIZES + 1];
+
+/* PVQ beta angles for enabling activity masking */
+extern const double *const ff_daalapvq_beta[2][DAALA_MAX_PLANES][DAALA_NBSIZES + 1];
+
+#endif /* AVCODEC_DAALATAB_H */
--
2.6.4