[FFmpeg-devel] [RFC v2 3/3] daaladec: Implement a native Daala decoder
Rostislav Pehlivanov
atomnuker at gmail.com
Tue Dec 29 03:12:44 CET 2015
Hmm, kinda odd.
I've attached a v2 of the RFC Patch which does this:
1. Fixed some typos.
2. Fixed some missing newlines at the end of the daalatab* files.
3. Moved the pix_fmt finding function into the main decoder file.
Try building now.
Signed-off-by: Rostislav Pehlivanov <atomnuker at gmail.com>
---
libavcodec/Makefile | 1 +
libavcodec/allcodecs.c | 1 +
libavcodec/daala.h | 78 ++
libavcodec/daala_entropy.h | 554 ++++++++++++
libavcodec/daala_pvq.h | 369 ++++++++
libavcodec/daala_utils.h | 202 +++++
libavcodec/daaladec.c | 804 +++++++++++++++++
libavcodec/daaladsp.c | 2123 ++++++++++++++++++++++++++++++++++++++++++++
libavcodec/daaladsp.h | 78 ++
libavcodec/daalatab.c | 1544 ++++++++++++++++++++++++++++++++
libavcodec/daalatab.h | 85 ++
11 files changed, 5839 insertions(+)
create mode 100644 libavcodec/daala.h
create mode 100644 libavcodec/daala_entropy.h
create mode 100644 libavcodec/daala_pvq.h
create mode 100644 libavcodec/daala_utils.h
create mode 100644 libavcodec/daaladec.c
create mode 100644 libavcodec/daaladsp.c
create mode 100644 libavcodec/daaladsp.h
create mode 100644 libavcodec/daalatab.c
create mode 100644 libavcodec/daalatab.h
diff --git a/libavcodec/Makefile b/libavcodec/Makefile
index 1c7568b..85d2e00 100644
--- a/libavcodec/Makefile
+++ b/libavcodec/Makefile
@@ -220,6 +220,7 @@ OBJS-$(CONFIG_COMFORTNOISE_ENCODER) += cngenc.o
OBJS-$(CONFIG_CPIA_DECODER) += cpia.o
OBJS-$(CONFIG_CSCD_DECODER) += cscd.o
OBJS-$(CONFIG_CYUV_DECODER) += cyuv.o
+OBJS-$(CONFIG_DAALA_DECODER) += daaladec.o daalatab.o daaladsp.o
OBJS-$(CONFIG_DCA_DECODER) += dcadec.o dca.o dcadsp.o \
dcadata.o dca_exss.o \
dca_xll.o synth_filter.o
diff --git a/libavcodec/allcodecs.c b/libavcodec/allcodecs.c
index 4eeb6f3..43d99b3 100644
--- a/libavcodec/allcodecs.c
+++ b/libavcodec/allcodecs.c
@@ -154,6 +154,7 @@ void avcodec_register_all(void)
REGISTER_DECODER(CPIA, cpia);
REGISTER_DECODER(CSCD, cscd);
REGISTER_DECODER(CYUV, cyuv);
+ REGISTER_DECODER(DAALA, daala);
REGISTER_DECODER(DDS, dds);
REGISTER_DECODER(DFA, dfa);
REGISTER_DECODER(DIRAC, dirac);
diff --git a/libavcodec/daala.h b/libavcodec/daala.h
new file mode 100644
index 0000000..535e78f
--- /dev/null
+++ b/libavcodec/daala.h
@@ -0,0 +1,78 @@
+/*
+ * Daala video decoder
+ *
+ * Copyright (C) 2015 Rostislav Pehlivanov <atomnuker at gmail.com>
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#ifndef AVCODEC_DAALA_H
+#define AVCODEC_DAALA_H
+
+#include "avcodec.h"
+
+/* Essential typedefs */
+typedef uint32_t ent_win; /* Has to be able to express 32bit uint nums */
+typedef uint16_t ent_rng;
+typedef int32_t dctcoef;
+
+#define daalaent_log2(x) (int)(1 + ff_log2(x))
+
+/* Block sizes */
+enum DaalaBsize {
+ DAALA_BLOCKSIZE_4x4 = 0,
+ DAALA_BLOCKSIZE_8x8,
+ DAALA_BLOCKSIZE_16x16,
+ DAALA_BLOCKSIZE_32x32,
+ DAALA_BLOCKSIZE_64x64,
+
+ DAALA_NBSIZES
+};
+
+#define DAALA_MAX_REF_FRAMES 2 /* Maximum number of reference frames */
+#define DAALA_MAX_PLANES 4 /* Redundant but makes loops more descriptive */
+#define DAALA_LOG_BSIZE0 2
+#define DAALA_LOG_BSIZE_MAX (DAALA_LOG_BSIZE0 + DAALA_NBSIZES - 1)
+#define DAALA_BSIZE_MAX (1 << DAALA_LOG_BSIZE_MAX)
+#define DAALA_BSIZE_GRID (1 << (DAALA_NBSIZES - 2))
+#define DAALA_QM_SIZE DAALA_NBSIZES*(DAALA_NBSIZES + 1)
+#define DAALA_CSHIFT 4
+
+#define DAALA_QM_SCALE (1 << 15)
+#define DAALA_QM_SCALE_MAX (DAALA_QM_SCALE - 1)
+#define DAALA_QM_SCALE_UNIT (1.0f/DAALA_QM_SCALE_MAX)
+#define DAALA_QM_INV_SCALE (1 << 12)
+#define DAALA_QM_INV_SCALE_UNIT (1.0f/DAALA_QM_INV_SCALE)
+#define DAALA_QM_BSIZE (DAALA_BSIZE_MAX*DAALA_BSIZE_MAX)
+#define DAALA_QM_BUFFER_SIZE (DAALA_NBSIZES*2*DAALA_QM_BSIZE)
+
+typedef struct DaalaBitstreamHeader {
+ uint8_t key_frame;
+ uint8_t bipred;
+ uint8_t ref_num;
+ uint8_t act_mask;
+ uint8_t qm;
+ uint8_t haar;
+ uint8_t golden;
+ uint8_t pvq_qm[DAALA_MAX_PLANES][DAALA_QM_SIZE];
+} DaalaBitstreamHeader;
+
+typedef struct DaalaSharedContext {
+ DaalaBitstreamHeader h;
+} DaalaSharedContext;
+
+#endif /* AVCODEC_DAALA_H */
diff --git a/libavcodec/daala_entropy.h b/libavcodec/daala_entropy.h
new file mode 100644
index 0000000..3fdcaef
--- /dev/null
+++ b/libavcodec/daala_entropy.h
@@ -0,0 +1,554 @@
+/*
+ * Copyright 2001-2015 Xiph.Org and contributors.
+ * Copyright 2015 Rostislav Pehlivanov <atomnuker at gmail.com>
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+ * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef AVCODEC_DAALAENTROPY_H
+#define AVCODEC_DAALAENTROPY_H
+
+#include "libavutil/avassert.h"
+
+#include "daala.h"
+#include "daalatab.h"
+
+#define DAALAENT_CDF_ACCESS(n) (&daalaent_cdf_tab[((n)*((n) - 1) >> 1) - 1])
+
+/* Window size in bits; fully parenthesized so the macro expands safely
+ * inside any arithmetic expression. */
+#define DAALAENT_WSIZE ((int)sizeof(ent_win)*CHAR_BIT)
+#define DAALAENT_BIT_ABUNDANCE 16384
+#define DAALAENT_UINT_BITS 4
+#define DAALAENT_MODEL_TAB 12
+/* Saturating subtraction, max(a - b, 0); the first argument is
+ * parenthesized to avoid precedence bugs with compound expressions. */
+#define DAALAENT_SAT(a,b) ((a) - FFMIN(a,b))
+
+#define DAALAENT_PVQ_COUNT 2
+#define DAALAENT_PVQ_COUNT_EX 3
+#define DAALAENT_PVQ_K 0
+#define DAALAENT_PVQ_SUM_EX 1
+#define DAALAENT_PVQ_NOVAL (-2147483647-1) /* INT32_MIN, spelled to avoid overflow */
+
+enum DaalaCDFDecodeType {
+ CDF_NORM = 0,
+ CDF_Q15,
+ CDF_UNSCALED,
+ CDF_DYADIC,
+};
+
+typedef struct DaalaCDF {
+ ent_rng *cdf;
+ int x, y, inc, inc_g, fir;
+ uint8_t gen_mod;
+} DaalaCDF;
+
+typedef struct DaalaEntropy {
+ const uint8_t *buf, *ebuf; /* Normal */
+ const uint8_t *rbuf, *erbuf; /* Raw */
+ ent_rng range;
+ ent_win diff, end_window;
+ int16_t count;
+ int eos_offset, end_window_size;
+ uint8_t err;
+} DaalaEntropy;
+
+/* Rounded log2 of an expectation value in Q16, output in Q1 */
+static av_always_inline int daalaent_log_ex(int ex_q16)
+{
+    int o, log = daalaent_log2(ex_q16);
+    if (log < 15) {
+        /* Rounding term: set when ex^2 exceeds 2^(2*log + 1) */
+        o = ex_q16*ex_q16 > 2 << 2*log;
+    } else {
+        /* Scale down first so the square cannot overflow */
+        int tmp = ex_q16 >> (log - 8);
+        o = tmp*tmp > (1 << 15);
+    }
+    return FFMAX(0, 2*log - 33 + o);
+}
+
+/* Number of bits consumed so far, counting both the forward (CDF) buffer
+ * and the backward (raw bits) buffer, minus what is still buffered. */
+static av_always_inline int daalaent_bits_count(DaalaEntropy *e)
+{
+    return ((e->ebuf - e->erbuf) + (e->buf - e->rbuf))*8 - \
+           e->count - e->end_window_size + e->eos_offset;
+}
+
+/* Refills the decoder window with up to WSIZE-8 bits from the forward
+ * buffer. Past the end of real data it substitutes "abundant" virtual
+ * bits and records the overshoot in eos_offset so that
+ * daalaent_bits_count() stays exact. */
+static av_always_inline void daalaent_fillup(DaalaEntropy *e)
+{
+    int i = DAALAENT_WSIZE - 9 - (e->count + 15);
+    const uint8_t *ebuf = e->ebuf, *buf = e->buf;
+    for (; i >= 0 && buf < ebuf; i -= 8, buf++) {
+        av_assert0(i <= DAALAENT_WSIZE - 8);
+        e->diff |= (ent_win)buf[0] << i;
+        e->count += 8;
+    }
+    if (buf >= ebuf) {
+        /* Out of real data: pretend an abundance of bits is available */
+        e->eos_offset += DAALAENT_BIT_ABUNDANCE - e->count;
+        e->count = DAALAENT_BIT_ABUNDANCE;
+    }
+    e->ebuf = ebuf;
+    e->buf = buf;
+}
+
+/* Updates the generic exponential probability model: bumps CDF entries
+ * from the decoded symbol 'xs' upwards and IIR-filters the expectation
+ * value '*ex' (Q16) towards 'x' at speed 'integrate'. */
+static av_always_inline void daalaent_exp_model_update(DaalaCDF *c, int *ex, int x,
+                                                       int xs, int id, int integrate)
+{
+    int i, xenc;
+    ent_rng *cdf = &c->cdf[id*c->y];
+    /* Halve the CDF counts before they can exceed 15 bits */
+    if (cdf[15] + c->inc > 32767) {
+        for (i = 0; i < 16; i++)
+            cdf[i] = (cdf[i] >> 1) + i + 1;
+    }
+    xenc = FFMIN(15, xs);
+    for (i = xenc; i < 16; i++)
+        cdf[i] += c->inc;
+    x = FFMIN(x, 32767);
+    *ex += ((x << 16) - *ex) >> integrate;
+}
+
+
+/* Renormalizes the coder state so the range occupies the top 16 bits,
+ * refilling the window when the buffered bit count goes negative. */
+static inline void daalaent_renormalize(DaalaEntropy *e, uint32_t diff,
+                                        ent_rng range)
+{
+    int i = 16 - daalaent_log2(range);
+    e->diff = diff << i;
+    e->range = range << i;
+    if ((e->count -= i) < 0)
+        daalaent_fillup(e);
+}
+
+/* Decodes a bool from the bitstream, p ∈ (0, p_tot), p_tot ∈ [16384, 32768] */
+static inline uint8_t daalaent_decode_bool(DaalaEntropy *e, uint32_t p,
+                                           uint32_t p_tot)
+{
+    uint8_t rval;
+    long int diff = e->range - p_tot, tmp = diff >= p_tot;
+    av_assert0(e->diff >> (DAALAENT_WSIZE - 16) < e->range);
+    /* Double the probability while it still fits inside the range */
+    p <<= tmp;
+    p_tot <<= tmp;
+    /* Map p into the coder's interval (mirrors daalaent_decode_cdf) */
+    tmp = DAALAENT_SAT(2*diff, p_tot);
+    tmp = p + FFMIN(p, tmp) + FFMIN(DAALAENT_SAT(p, tmp) >> 1, diff);
+    diff = tmp << (DAALAENT_WSIZE - 16);
+    rval = e->diff >= diff;
+    /* Keep whichever half of the interval the symbol fell into */
+    diff = e->diff - (rval ? diff : 0);
+    tmp = rval ? e->range - tmp : tmp;
+    daalaent_renormalize(e, diff, tmp);
+    return rval;
+}
+
+/* Decodes a symbol from a CDF table. 'type' selects how the CDF totals
+ * are scaled into the coder's 15 bit probability space; returns the
+ * decoded symbol index. */
+static inline int daalaent_decode_cdf(DaalaEntropy *e, const ent_rng *cdf,
+                                      int cdf_size, uint32_t p_tot,
+                                      enum DaalaCDFDecodeType type)
+{
+    int d, lim, g, scale, ret = 0;
+    ent_rng range = e->range;
+    ent_win diff = e->diff, u = 0, v = 0;
+    const int cshift = DAALAENT_WSIZE - 16;
+    const int cval = diff >> cshift;
+    av_assert0(diff >> cshift < range); /* Probably the most important assert */
+    if (type == CDF_UNSCALED) {
+        /* Arbitrary total: scale it up to just below 15 bits */
+        p_tot = cdf[cdf_size - 1];
+        av_assert0(2 <= p_tot && p_tot <= 32768);
+        scale = 15 - daalaent_log2(p_tot - 1);
+        p_tot <<= scale;
+        av_assert0(p_tot <= range);
+        if (range - p_tot >= p_tot) {
+            p_tot <<= 1;
+            scale++;
+        }
+        d = range - p_tot;
+    } else if (type == CDF_Q15) {
+        /* Total already exactly 32768 */
+        av_assert0(cdf[cdf_size - 1] == 32768);
+        av_assert0(32768 <= range);
+        d = range - 32768;
+        p_tot = 32768;
+        scale = 0;
+    } else if (type == CDF_DYADIC) {
+        /* Total is 1 << p_tot (power of two) */
+        av_assert0(cdf[cdf_size - 1] == 1 << p_tot);
+        scale = 15 - p_tot;
+        av_assert0(32768 <= range);
+        d = range - 32768;
+        p_tot = 32768;
+    } else { /* CDF_NORM */
+        p_tot = cdf[cdf_size - 1];
+        av_assert0(16384 <= p_tot && p_tot <= 32768);
+        av_assert0(p_tot <= range);
+        scale = range - p_tot >= p_tot;
+        p_tot <<= scale;
+        d = range - p_tot;
+    }
+    g = DAALAENT_SAT(2*d, p_tot);
+    /* Linear scan for the first CDF entry above the current code value */
+    lim = FFMAX(FFMAX(cval >> 1, cval - d), (2*cval + 1 - g)/3) >> scale;
+    for (v = cdf[ret]; v <= lim; v = cdf[++ret])
+        u = v;
+    u <<= scale;
+    v <<= scale;
+    /* Map [u, v) into the coder's interval, then renormalize */
+    u = u + FFMIN(u, g) + FFMIN(DAALAENT_SAT(u, g) >> 1, d);
+    v = v + FFMIN(v, g) + FFMIN(DAALAENT_SAT(v, g) >> 1, d);
+    range = v - u;
+    diff -= u << cshift;
+    daalaent_renormalize(e, diff, range);
+    return ret;
+}
+
+/* Decodes raw bits from the bitstream, num ∈ [0, 25]. Raw bits are read
+ * backwards from the end of the buffer, independently of the range coder. */
+static inline ent_win daalaent_decode_bits(DaalaEntropy *e, int num)
+{
+    int avail = e->end_window_size;
+    ent_win ret, win = e->end_window;
+    if (avail < num) {
+        const uint8_t *erbuf = e->erbuf;
+        av_assert0(avail <= DAALAENT_WSIZE - 8);
+        do {
+            /* When the raw pointer meets the forward buffer, substitute
+             * virtual bits and account for them in eos_offset */
+            if (erbuf <= e->rbuf) {
+                e->eos_offset += DAALAENT_BIT_ABUNDANCE - avail;
+                avail = DAALAENT_BIT_ABUNDANCE;
+                break;
+            }
+            win |= (ent_win)*--erbuf << avail;
+            avail += 8;
+        } while (avail <= DAALAENT_WSIZE - 8);
+        e->erbuf = erbuf;
+    }
+    ret = win & ((1 << num) - 1);
+    win >>= num;
+    avail -= num;
+    e->end_window = win;
+    e->end_window_size = avail;
+    return ret;
+}
+
+/* Reads one raw sign bit if 'cond' is nonzero and returns ±1; when
+ * cond == 0 no bit is consumed and +1 is returned.
+ * Renamed from _daalaent_cphase: file-scope identifiers with a leading
+ * underscore are reserved by the C standard (C11 7.1.3). */
+static av_always_inline int8_t daalaent_cphase_internal(DaalaEntropy *e, uint8_t cond)
+{
+    if (cond)
+        return 1 - 2*daalaent_decode_bits(e, 1);
+    return 1;
+}
+/* The argument is parenthesized inside !!(y) so compound expressions
+ * (e.g. comparisons) normalize correctly. */
+#define daalaent_cphase(x,y) daalaent_cphase_internal(x, !!(y))
+
+/* Unary coding: counts the zero bits preceding the terminating one bit.
+ * "+derf | It was a hack for the screen coding wavelet tools." */
+static inline int daalaent_decode_unary(DaalaEntropy *e)
+{
+    int zeros;
+    for (zeros = 0; !daalaent_decode_bits(e, 1); zeros++)
+        ;
+    return zeros;
+}
+
+/* Decodes a uint from the bitstream, num ∈ [2, 2^29] */
+static inline ent_win daalaent_decode_uint(DaalaEntropy *e, ent_win num)
+{
+    av_assert0(num <= 1 << (25 + DAALAENT_UINT_BITS));
+    if (num > 1 << DAALAENT_UINT_BITS) {
+        /* Large range: high bits via a CDF symbol, low 'bit' bits raw */
+        int bit = daalaent_log2(--num) - DAALAENT_UINT_BITS;
+        int adr = (num >> bit) + 1;
+        ent_win t = daalaent_decode_cdf(e, DAALAENT_CDF_ACCESS(adr), adr,
+                                        0, CDF_Q15);
+        t = t << bit | daalaent_decode_bits(e, bit);
+        if (t <= num)
+            return t;
+        /* Out-of-range value: flag the bitstream error and clamp */
+        e->err = 1;
+        return num;
+    }
+    return daalaent_decode_cdf(e, DAALAENT_CDF_ACCESS(num), num, 0, CDF_Q15);
+}
+
+/* Decodes a symbol from an adaptive CDF row and updates the row to make
+ * the decoded symbol more probable next time. */
+static inline int daalaent_decode_cdf_adapt(DaalaEntropy *e, DaalaCDF *c,
+                                            int cdf_offset, int num)
+{
+    int i;
+    ent_rng *cdf = &c->cdf[cdf_offset*c->y];
+    const int rval = daalaent_decode_cdf(e, cdf, num, 0, CDF_UNSCALED);
+    /* Halve the counts before they can exceed 15 bits */
+    if (cdf[num - 1] + c->inc > 32767) {
+        for (i = 0; i < num; i++)
+            cdf[i] = (cdf[i] >> 1) + i + 1;
+    }
+    for (i = rval; i < num; i++)
+        cdf[i] += c->inc;
+    return rval;
+}
+
+/* "Special laplace decoder": decodes a position in [0, max] (max == -1
+ * means unbounded) drawn from a Laplace distribution with given decay. */
+static inline int daalaent_decode_laplace(DaalaEntropy *e, unsigned int decay,
+                                          int max)
+{
+    const ent_rng *cdf;
+    int pos, sym, max_shift, shift = 0, p_shift = 0;
+    if (!max)
+        return 0;
+    /* Square the decay until the range fits a 15-symbol CDF */
+    while (((max >> shift) >= 15 || max == -1) && decay > 235) {
+        decay = (decay*decay + 128) >> 8;
+        shift++;
+    }
+    max_shift = max >> shift;
+    decay = FFMAX(FFMIN(decay, 254), 2);
+    cdf = daalaent_cdf_exp_tab[(decay + 1) >> 1];
+    do {
+        uint8_t shift_bound = max_shift > 0 && max_shift < 15;
+        int cdf_size = shift_bound ? max_shift + 1 : 16;
+        int cdf_type = shift_bound ? CDF_UNSCALED : CDF_Q15;
+        sym = daalaent_decode_cdf(e, cdf, cdf_size, 0, cdf_type);
+        p_shift += sym;
+        max_shift -= 15;
+    } while (sym >= 15 && max_shift);
+    /* The 'shift' low bits were coded raw */
+    pos = shift ? (p_shift << shift) + daalaent_decode_bits(e, shift) : p_shift;
+    av_assert0(pos >> shift <= max >> shift || max == -1);
+    if (max != -1 && pos > max) {
+        pos = max;
+        e->err = 1;
+    }
+    return pos;
+}
+
+/* Used by the vector and delta laplace decoding functions for PVQ.
+ * exp_v is the expected value (Q8), max_mod_v the maximum magnitude. */
+static inline int daalaent_decode_laplace_pvq(DaalaEntropy *e, unsigned int exp_v,
+                                              int max_mod_v)
+{
+    int sym = 0, lsb = 0;
+    /* Scale everything down so the expectation fits the CDF tables */
+    const int shift = FFMAX(daalaent_log2(exp_v) - 11, 0);
+    const int ex = (exp_v + (1 << shift >> 1)) >> shift;
+    const int maxval = (max_mod_v + (1 << shift >> 1)) >> shift;
+    /* The reference decoder uses snake oil DIVU optimization here */
+    const int decay = FFMIN(254, 256*ex/(ex + 256));
+    const int offset = daalaent_laplace_offset[(decay + 1) >> 1];
+    if (maxval) {
+        ent_rng i, cdf[16];
+        for (i = 0; i < 16; i++)
+            cdf[i] = daalaent_cdf_exp_tab[(decay + 1) >> 1][i] - offset;
+        sym = daalaent_decode_cdf(e, cdf, FFMIN(maxval + 1, 16), 0, CDF_UNSCALED);
+    }
+    if (shift) {
+        /* Read back the rounding LSBs that were coded raw */
+        if (shift - !sym > 0)
+            lsb = daalaent_decode_bits(e, shift - !sym);
+        lsb -= (!!sym << (shift - 1));
+    }
+    if (sym == 15) /* Tail */
+        sym += daalaent_decode_laplace(e, decay, maxval - 15);
+    return (sym << shift) + lsb;
+}
+
+/* Decodes a sparse pulse vector (k pulses over n positions) as deltas
+ * between successive pulse positions. 'curr' receives the adaptation
+ * values for this call; 'means' holds the running averages. */
+static inline void daalaent_decode_laplace_delta(DaalaEntropy *e, dctcoef *y,
+                                                 int n, int k, dctcoef *curr,
+                                                 const dctcoef *means)
+{
+    int i, k0 = k, k_left = k;
+    int prev = 0, sum_ex = 0, sum_c = 0, pos = 0;
+    /* NOTE(review): means[DAALAENT_PVQ_COUNT_EX] == -1 would divide by
+     * zero here; presumably the adaptation never produces it — confirm. */
+    int coef = 256*means[DAALAENT_PVQ_COUNT]/(1 + means[DAALAENT_PVQ_COUNT_EX]);
+    memset(y, 0, n*sizeof(dctcoef));
+    coef = FFMAX(coef, 1);
+    for (i = 0; i < k0; i++) {
+        int count;
+        if (!i) {
+            /* First pulse position uses the plain laplace decoder */
+            int decay;
+            int ex = coef*(n - prev)/k_left;
+            if (ex > 65280)
+                decay = 255;
+            else
+                decay = FFMIN(255, (int)((256*ex/(ex + 256) + (ex>>5)*ex/((n + 1)*(n - 1)*(n - 1)))));
+            count = daalaent_decode_laplace(e, decay, n - 1);
+        }
+        else
+            count = daalaent_decode_laplace_pvq(e, coef*(n - prev)/k_left, n - prev - 1);
+        sum_ex += 256*(n - prev);
+        sum_c += count*k_left;
+        pos += count;
+        av_assert0(pos < n);
+        /* Pulses stacked on the same position share one sign bit */
+        y[pos] += daalaent_cphase(e, !y[pos]);
+        prev = pos;
+        k_left--;
+        if (!k_left)
+            break;
+    }
+    if (k > 0) {
+        curr[DAALAENT_PVQ_COUNT] = 256*sum_c;
+        curr[DAALAENT_PVQ_COUNT_EX] = sum_ex;
+    }
+    else {
+        curr[DAALAENT_PVQ_COUNT] = -1;
+        curr[DAALAENT_PVQ_COUNT_EX] = 0;
+    }
+    curr[DAALAENT_PVQ_K] = 0;
+    curr[DAALAENT_PVQ_SUM_EX] = 0;
+}
+
+/* Decodes a vector of quantized PVQ coefficients from the bitstream.
+ * y: output pulse vector (n entries), k: number of pulses, curr/means:
+ * adaptation state indexed by the DAALAENT_PVQ_* constants.
+ * The original also contained an "if (!k)" branch after the k <= 1
+ * early return; it was unreachable (k == 0 satisfies k <= 1) and has
+ * been removed — decoded output is unchanged. */
+static inline void daalaent_decode_laplace_vector(DaalaEntropy *e, dctcoef *y,
+                                                  int n, int k, dctcoef *curr,
+                                                  const dctcoef *means)
+{
+    int i, exp_q8, mean_k_q8, mean_sum_ex_q8, sum_ex = 0, kn = k, ran_delta = 0;
+    /* k <= 1 (including k == 0) is handled entirely by the delta coder */
+    if (k <= 1) {
+        daalaent_decode_laplace_delta(e, y, n, k, curr, means);
+        return;
+    }
+    mean_k_q8 = means[DAALAENT_PVQ_K];
+    mean_sum_ex_q8 = means[DAALAENT_PVQ_SUM_EX];
+    if (mean_k_q8 < 1 << 23)
+        exp_q8 = 256*mean_k_q8/(1 + mean_sum_ex_q8);
+    else
+        exp_q8 = mean_k_q8/(1 + (mean_sum_ex_q8 >> 8));
+    for (i = 0; i < n; i++) {
+        int x, ex;
+        if (!kn)
+            break;
+        if (kn <= 1 && i != n - 1) {
+            /* Few pulses left: switch to the cheaper delta coder */
+            daalaent_decode_laplace_delta(e, y + i, n - i, kn, curr, means);
+            ran_delta = 1;
+            i = n;
+            break;
+        }
+        ex = (2*exp_q8*kn + (n - i))/(2*(n - i));
+        if (ex > kn*256)
+            ex = kn*256;
+        sum_ex += (2*256*kn + (n - i))/(2*(n - i));
+        if (i != n - 1)
+            x = daalaent_decode_laplace_pvq(e, ex, kn);
+        else
+            x = kn; /* Last position takes all remaining pulses */
+        y[i] = x*daalaent_cphase(e, x);
+        kn -= abs(x);
+    }
+    memset(&y[i], 0, (n - i)*sizeof(dctcoef)); /* Zero the rest */
+    if (!ran_delta) {
+        curr[DAALAENT_PVQ_COUNT] = DAALAENT_PVQ_NOVAL;
+        curr[DAALAENT_PVQ_COUNT_EX] = DAALAENT_PVQ_NOVAL;
+    }
+    curr[DAALAENT_PVQ_K] = k - kn;
+    curr[DAALAENT_PVQ_SUM_EX] = sum_ex;
+}
+
+/* Generic value decoder driven by an adaptive exponential model.
+ * Expectation value '*ex' is in Q16; max == -1 means unbounded. */
+static inline int daalaent_decode_generic(DaalaEntropy *e, DaalaCDF *c, int *ex,
+                                          int max, int integrate)
+{
+    int rval, lsb = 0, log_ex = daalaent_log_ex(*ex);
+    const int shift = FFMAX(0, (log_ex - 5) >> 1);
+    const int id = FFMIN(DAALAENT_MODEL_TAB - 1, log_ex);
+    const int ms = (max + (1 << shift >> 1)) >> shift;
+    int xs = (max == -1) ? 16 : FFMIN(ms + 1, 16);
+    ent_rng *cdf = &c->cdf[id*c->y];
+    if (!max)
+        return 0;
+    if ((xs = daalaent_decode_cdf(e, cdf, xs, 0, CDF_UNSCALED)) == 15) {
+        /* Tail symbol: the remainder follows a Laplace distribution */
+        int g = ((2*(*ex) >> 8) + (1 << shift >> 1)) >> shift;
+        ent_win decay = FFMAX(2, FFMIN(254, 256*g/(g + 256)));
+        xs += daalaent_decode_laplace(e, decay, (max == -1) ? -1 : ms - 15);
+    }
+    if (shift) {
+        /* Read the raw-coded rounding LSBs */
+        if (shift > !xs)
+            lsb = daalaent_decode_bits(e, shift - !xs);
+        lsb -= !!xs << (shift - 1);
+    }
+    rval = (xs << shift) + lsb;
+    daalaent_exp_model_update(c, ex, rval, xs, id, integrate);
+    return rval;
+}
+
+/* Initializes the entropy decoder over [buf, buf + buf_size) and primes
+ * the bit window. Forward reads (CDF) start at buf, raw reads at the end. */
+static inline void daalaent_decode_init(DaalaEntropy *e, const uint8_t *buf,
+                                        int buf_size)
+{
+    const uint8_t *end = buf + buf_size;
+    e->rbuf            = buf;
+    e->erbuf           = end;
+    e->buf             = buf;
+    e->ebuf            = end;
+    e->err             = 0;
+    e->diff            = 0;
+    e->range           = 32768;
+    e->count           = -15;
+    e->eos_offset      = 18 - DAALAENT_WSIZE;
+    e->end_window      = 0;
+    e->end_window_size = 0;
+    daalaent_fillup(e);
+}
+
+/* Resets every CDF row to its initial, monotonically increasing state. */
+static av_always_inline void daalaent_cdf_reset(DaalaCDF *s)
+{
+    int row, col;
+    for (row = 0; row < s->x; row++) {
+        ent_rng *cdf = &s->cdf[row*s->y];
+        for (col = 0; col < s->y; col++)
+            cdf[col] = s->inc_g*(col + s->gen_mod) + s->fir;
+    }
+}
+
+/* Allocates and configures a CDF table of x rows by y entries each.
+ * Returns 0 on success, 1 on allocation failure. */
+static inline int daalaent_cdf_alloc(DaalaCDF *s, int x, int y, int inc, int fir,
+                                     uint8_t inc_shift, uint8_t gen_mod)
+{
+    s->x = x;
+    s->y = y;
+    s->inc = inc;
+    s->gen_mod = gen_mod;
+    s->inc_g = s->inc >> inc_shift;
+    /* Parentheses for clarity: ?: binds lower than || */
+    s->fir = (!!fir || s->gen_mod) ? fir : s->inc_g;
+    /* av_malloc_array() checks the x*y product for overflow, unlike the
+     * plain av_malloc(x*y*sizeof) it replaces */
+    s->cdf = av_malloc_array(x, y*sizeof(ent_rng));
+    if (!s->cdf)
+        return 1;
+    return 0;
+}
+
+/* Frees the CDF table and clears the pointer; NULL-safe. */
+static av_always_inline void daalaent_cdf_free(DaalaCDF *s)
+{
+    if (s)
+        av_freep(&s->cdf);
+}
+
+/* Decodes the preamble at the start of every frame.
+ * Returns 0 on success, 1 if the first coded flag is set (treated as an
+ * invalid header by the caller — presumably a reserved bit; confirm). */
+static inline int daalaent_decode_frame_header(DaalaEntropy *e,
+                                               DaalaBitstreamHeader *h,
+                                               int planes)
+{
+    int i, j;
+    if (daalaent_decode_bool(e, 16384, 32768))
+        return 1;
+    h->key_frame = daalaent_decode_bool(e, 16384, 32768);
+    if (!h->key_frame) {
+        h->bipred = daalaent_decode_bool(e, 16384, 32768);
+        h->ref_num = 1 + daalaent_decode_uint(e, DAALA_MAX_REF_FRAMES);
+    } else {
+        h->ref_num = 0;
+    }
+    h->act_mask = daalaent_decode_bool(e, 16384, 32768);
+    h->qm = daalaent_decode_bool(e, 16384, 32768);
+    h->haar = daalaent_decode_bool(e, 16384, 32768);
+    h->golden = daalaent_decode_bool(e, 16384, 32768);
+    /* Per-plane PVQ quantization matrices, present on key frames only */
+    if (h->key_frame && planes) {
+        for (i = 0; i < planes; i++) {
+            for (j = 0; j < DAALA_QM_SIZE; j++) {
+                h->pvq_qm[i][j] = daalaent_decode_bits(e, 8);
+            }
+        }
+    }
+    return 0;
+}
+
+#endif /* AVCODEC_DAALAENTROPY_H */
diff --git a/libavcodec/daala_pvq.h b/libavcodec/daala_pvq.h
new file mode 100644
index 0000000..ea8a86b
--- /dev/null
+++ b/libavcodec/daala_pvq.h
@@ -0,0 +1,369 @@
+/*
+ * Copyright 2001-2015 Xiph.Org and contributors.
+ * Copyright 2015 Rostislav Pehlivanov <atomnuker at gmail.com>
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+ * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef AVCODEC_DAALAPVQ_H
+#define AVCODEC_DAALAPVQ_H
+
+#include "daala.h"
+#include "daala_entropy.h"
+
+#define DAALAPVQ_NUM_ADAPTS 4
+
+#define DAALAPVQ_SKIP_ZERO 1
+#define DAALAPVQ_SKIP_COPY 2
+#define DAALAPVQ_PARTITIONS_MAX 9
+
+#define DAALAPVQ_MAX_PART_SIZE (DAALA_QM_BSIZE/2)
+#define DAALAPVQ_COMPAND_SCALE (256 << DAALA_CSHIFT)
+#define DAALAPVQ_COMPAND_SCALE_1 (1.0f/DAALAPVQ_COMPAND_SCALE)
+
+/* PVQ Context struct */
+typedef struct DaalaPVQ {
+ uint8_t phase;
+ int nb_coeffs;
+ int size[DAALAPVQ_PARTITIONS_MAX];
+ int skip[DAALAPVQ_PARTITIONS_MAX];
+ int qmax[DAALAPVQ_PARTITIONS_MAX];
+ /* All of the above is set on every decode call */
+
+ /* Everything below is reset on every frame */
+ DaalaCDF pvqcodeword_cdf;
+ DaalaCDF pvqskip_cdf;
+ DaalaCDF pvqtheta_gain_cdf;
+ DaalaCDF pvqgain_ref_mcdf;
+ DaalaCDF pvqgain_noref_mcdf;
+ DaalaCDF pvqtheta_mcdf;
+ int pvqgain_ex[DAALA_MAX_PLANES][DAALA_NBSIZES][DAALAPVQ_PARTITIONS_MAX];
+ int pvqtheta_ex[DAALA_NBSIZES*DAALAPVQ_PARTITIONS_MAX];
+ int pvqadapt[2*DAALAPVQ_NUM_ADAPTS*DAALA_NBSIZES];
+
+ int16_t qmatrix[DAALA_QM_BUFFER_SIZE];
+ int16_t qmatrix_inv[DAALA_NBSIZES*2*DAALA_QM_BSIZE];
+} DaalaPVQ;
+
+/* Index for packed quantization matrices */
+static av_always_inline int daalapvq_get_qm_idx(enum DaalaBsize bsize, int band)
+{
+    return bsize*bsize + bsize + band - band/3;
+}
+
+/* Computes the theta quantization range from the companded gain */
+static av_always_inline int daalapvq_calc_theta_quant(double gain, double beta)
+{
+    if (gain < 1.4f)
+        return 1;
+    return lrint((gain*M_PI)/(2.0f*beta));
+}
+
+/* Dequantizes the theta angle; result is in [0, π/2), 0 when max == 0 */
+static av_always_inline double daalapvq_dequant_theta(int t, int max)
+{
+    if (!max)
+        return 0;
+    return FFMIN(t, max - 1)*0.5f*M_PI/max;
+}
+
+/* Applies the Householder reflection 'r' to the n-element vector 'x' */
+static inline void daalapvq_householder_a(double *x, const double *r, int n)
+{
+    int i;
+    double projection = 0.0f, e = 0.0f;
+    for (i = 0; i < n; i++) {
+        e += r[i]*r[i];
+        projection += r[i]*x[i];
+    }
+    /* 1e-100 guards against division by zero for an all-zero reflector */
+    projection *= 2.0f/(1e-100 + e);
+    for (i = 0; i < n; i++)
+        x[i] -= r[i]*projection;
+}
+
+/* Builds the Householder reflection vector in place. Returns the index
+ * of the largest-magnitude component; its sign is stored in *sign. */
+static inline int daalapvq_householder_c(double *r, int n, double gr, int *sign)
+{
+    int i, s, m = 0;
+    double maxr = 0.0f;
+    for (i = 0; i < n; i++) {
+        if (fabs(r[i]) > maxr) {
+            maxr = fabs(r[i]);
+            m = i;
+        }
+    }
+    s = r[m] > 0 ? 1 : -1;
+    r[m] += gr*s;
+    *sign = s;
+    return m;
+}
+
+/* Undoes the gain interleaving: values near ref_gain were coded with
+ * alternating above/below offsets, larger values linearly. */
+static inline int daalapvq_decode_gain_interleaved(int x, int ref_gain)
+{
+    if (x < 2*ref_gain - 1) {
+        if (x & 1)
+            return ref_gain - 1 - (x >> 1);
+        else
+            return ref_gain + (x >> 1);
+    }
+    return x + 1;
+}
+
+/* Raw gain -> companded (coded) gain; inverse of daalapvq_gain_raise() */
+static av_always_inline double daalapvq_gain_root(double g, int q0, double beta)
+{
+    if (beta == 1.0f)
+        return g/q0;
+    return DAALAPVQ_COMPAND_SCALE*pow(g*DAALAPVQ_COMPAND_SCALE_1, 1.0f/beta)/q0;
+}
+
+/* Companded gain -> raw gain; beta == 1.5 takes a cheaper sqrt() path */
+static inline double daalapvq_gain_raise(double cg, int q0, double beta)
+{
+    if (beta == 1.0f)
+        return cg*q0;
+    else if (beta == 1.5f) {
+        cg *= q0*DAALAPVQ_COMPAND_SCALE_1;
+        return DAALAPVQ_COMPAND_SCALE*cg*sqrt(cg);
+    }
+    return DAALAPVQ_COMPAND_SCALE*pow(cg*q0*DAALAPVQ_COMPAND_SCALE_1, beta);
+}
+
+/* Computes the companded gain of x; the raw (quantization-matrix
+ * weighted) L2 norm is returned through *g. */
+static inline double daalapvq_compute_gain(dctcoef *x, int n, int q0, double *g,
+                                           double beta, const int16_t *qmatrix)
+{
+    int i;
+    double acc = 0.0f;
+    for (i = 0; i < n; i++) {
+        const double tmp = x[i]*(double)qmatrix[i]*DAALA_QM_SCALE_UNIT;
+        acc += tmp*tmp;
+    }
+    *g = sqrt(acc);
+    return daalapvq_gain_root(*g, q0, beta);
+}
+
+/* Computes the number of pulses k from the quantized gain and theta.
+ * NOTE(review): (n+3)/2 and (n+2)/2 are integer divisions before sqrt();
+ * looks deliberate (encoder must round identically) — confirm against
+ * the reference implementation before "fixing". */
+static inline int daalapvq_compute_k(double qcg, int itheta, double theta,
+                                     int has_ref, int n, double beta, int robust)
+{
+    if (!has_ref) {
+        if (!qcg)
+            return 0;
+        if (n == 15 && qcg == 1.0f && beta > 1.25f)
+            return 1;
+        else
+            return FFMAX(1, lrint((qcg - 0.2f)*sqrt((n+3)/2)/beta));
+    }
+    if (!itheta)
+        return 0;
+    if (robust)
+        return FFMAX(1, lrint((itheta - 0.2f)*sqrt((n + 2)/2)));
+    else
+        return FFMAX(1, lrint((qcg*sin(theta) - 0.2f)*sqrt((n + 2)/2)/beta));
+}
+
+
+/* Reconstructs output coefficients from the decoded pulse vector:
+ * scales the pulses to the decoded gain and, when a reference is
+ * present, rotates them back through the Householder reflection. */
+static inline void daalapvq_synth(dctcoef *xcoeff, dctcoef *ypulse, dctcoef *ref,
+                                  int n, double gr, uint8_t ref_p, double gain,
+                                  double theta, const int16_t *qmatrix,
+                                  const int16_t *qmatrix_inv)
+{
+    int i, m, nn = n - ref_p, s = 0, yy = 0;
+    double scale, r[DAALAPVQ_MAX_PART_SIZE], x[DAALAPVQ_MAX_PART_SIZE];
+    if (ref_p) {
+        for (i = 0; i < n; i++)
+            r[i] = ref[i]*qmatrix[i]*DAALA_QM_SCALE_UNIT;
+    }
+    m = !ref_p ? 0 : daalapvq_householder_c(r, n, gr, &s);
+    for (i = 0; i < nn; i++)
+        yy += ypulse[i]*ypulse[i];
+    scale = !yy ? 0 : gain/sqrt(yy);
+    if (!ref_p) {
+        for (i = 0; i < n; i++)
+            xcoeff[i] = lrint((ypulse[i]*scale)*(qmatrix_inv[i]*DAALA_QM_INV_SCALE_UNIT));
+    } else {
+        scale *= sin(theta);
+        /* Insert the cos(theta) component at the reflection axis m */
+        for (i = 0; i < m; i++)
+            x[i] = ypulse[i]*scale;
+        x[m] = -s*gain*cos(theta);
+        for (i = m; i < nn; i++)
+            x[i+1] = ypulse[i]*scale;
+        daalapvq_householder_a(x, r, n);
+        for (i = 0; i < n; i++)
+            xcoeff[i] = lrint(x[i]*qmatrix_inv[i]*DAALA_QM_INV_SCALE_UNIT);
+    }
+}
+
+/* Folds the per-call adaptation values into the running averages;
+ * skipped entirely when src[idx] < 1 (no value recorded). */
+static av_always_inline void daalapvq_adapt_shuffle(int *dst, int *src, int spd,
+                                                    int idx, int mul)
+{
+    if (src[idx] < 1)
+        return;
+    dst[idx+0] += (mul*src[idx+0] - dst[idx+0]) >> spd;
+    dst[idx+1] += (    src[idx+1] - dst[idx+1]) >> spd;
+}
+
+/* Decodes a single PVQ codeword: a fast path codes one pulse's position
+ * via an adaptive CDF, otherwise a full Laplace-coded pulse vector. */
+static inline void daalapvq_decode_codeword(DaalaEntropy *e, DaalaPVQ *pvq,
+                                            dctcoef *y, int n, int k, uint8_t has_ref,
+                                            enum DaalaBsize bsize)
+{
+    int pos, adapt_curr[DAALAPVQ_NUM_ADAPTS] = {0};
+    int *pvq_adapt = pvq->pvqadapt + 4*(2*bsize + !has_ref);
+    const int cdf_id = 2*(n == 15) + !!has_ref, spd = 5;
+    if (k == 1 && n < 16) {
+        /* Single pulse: only its position and sign are coded */
+        pos = daalaent_decode_cdf_adapt(e, &pvq->pvqcodeword_cdf, cdf_id, n - !!has_ref);
+        memset(y, 0, n*sizeof(dctcoef));
+        y[pos] = daalaent_cphase(e, 1);
+    } else {
+        daalaent_decode_laplace_vector(e, y, n - !!has_ref, k, adapt_curr, pvq_adapt);
+        daalapvq_adapt_shuffle(pvq_adapt, adapt_curr, spd, DAALAENT_PVQ_K, 256);
+        daalapvq_adapt_shuffle(pvq_adapt, adapt_curr, spd, DAALAENT_PVQ_COUNT, 1);
+    }
+}
+
+/* Decodes one PVQ band: gain, theta and the pulse vector, then
+ * synthesizes band_len output coefficients into 'out'. */
+static inline void daalapvq_decode_vector(DaalaEntropy *e, DaalaPVQ *pvq,
+                                          dctcoef *out, dctcoef *ref,
+                                          const double beta,
+                                          uint8_t key_frame, int p,
+                                          uint8_t *skip_rest,
+                                          uint8_t has_err, int band_idx,
+                                          int qm_off, enum DaalaBsize bsize)
+{
+    int i, k;
+    int qg = 0, skip = 0, itheta = (!!key_frame), has_ref = !key_frame;
+    double qcg, gain, theta = 0.0f, gr = 0.0f, gain_off = 0.0f;
+    dctcoef tmp[DAALAPVQ_MAX_PART_SIZE] = {0};
+
+    const int robust = has_err || key_frame;
+    const int band_len = pvq->size[band_idx];
+    const int16_t *qmatrix = &pvq->qmatrix[qm_off];
+    const int16_t *qmatrix_inv = &pvq->qmatrix_inv[qm_off];
+
+    if (!skip_rest[(band_idx + 2) % 3]) {
+        /* Joint gain/theta/skip symbol */
+        int iloc = (!!p)*DAALA_NBSIZES*DAALAPVQ_PARTITIONS_MAX + bsize*DAALAPVQ_PARTITIONS_MAX + band_idx;
+        i = daalaent_decode_cdf_adapt(e, &pvq->pvqtheta_gain_cdf, iloc, 8 + 7*pvq->skip[band_idx]);
+        if (!key_frame && i >= 10)
+            i++;
+        if (key_frame && i >= 8)
+            i++;
+        if (i >= 8) {
+            /* High symbols additionally signal "skip remaining bands" */
+            i -= 8;
+            skip_rest[0] = skip_rest[1] = skip_rest[2] = 1;
+        }
+        qg = i & 1;
+        itheta = (i >> 1) - 1;
+        has_ref = !(itheta == -1);
+    }
+    if (qg) {
+        /* Gain magnitude via the generic adaptive decoder */
+        int *ex = pvq->pvqgain_ex[p][bsize] + band_idx, ex_tmp = *ex;
+        DaalaCDF *mcdf = has_ref ? &pvq->pvqgain_ref_mcdf : &pvq->pvqgain_noref_mcdf;
+        qg = 1 + daalaent_decode_generic(e, mcdf, &ex_tmp, -1, 2);
+        *ex += ((qg << 16) - *ex) >> 2;
+    }
+    if (has_ref) {
+        int icgr, max_theta;
+        const int cfl_enabled = p && key_frame;
+        double cgr = daalapvq_compute_gain(ref, band_len, pvq->qmax[band_idx],
+                                           &gr, beta, qmatrix);
+        if (cfl_enabled)
+            cgr = 1.0f;
+        icgr = lrint(cgr);
+        /* Gain is coded relative to the reference gain */
+        qg = daalapvq_decode_gain_interleaved(qg, icgr + !key_frame) - !key_frame;
+        if (!qg && !key_frame)
+            skip = (icgr ? DAALAPVQ_SKIP_ZERO : DAALAPVQ_SKIP_COPY);
+        if (qg == icgr && !itheta && !cfl_enabled)
+            skip = DAALAPVQ_SKIP_COPY;
+        gain_off = cgr - icgr;
+        qcg = qg + gain_off;
+        max_theta = daalapvq_calc_theta_quant(qcg, beta);
+        if (itheta > 1 && (robust || max_theta > 3)) {
+            const int off = bsize*DAALAPVQ_PARTITIONS_MAX + band_idx;
+            int ex_tmp = pvq->pvqtheta_ex[off];
+            itheta = 2 + daalaent_decode_generic(e, &pvq->pvqtheta_mcdf, &ex_tmp,
+                                                 robust ? -1 : max_theta - 3, 2);
+            pvq->pvqtheta_ex[off] += ((itheta << 16) - pvq->pvqtheta_ex[off]) >> 2;
+        }
+        theta = daalapvq_dequant_theta(itheta, max_theta);
+    } else {
+        itheta = 0;
+        qg += !key_frame;
+        qcg = qg;
+        skip = !qg ? DAALAPVQ_SKIP_ZERO : skip;
+    }
+
+    k = daalapvq_compute_k(qcg, itheta, theta, has_ref, band_len, beta, robust);
+    if (k)
+        daalapvq_decode_codeword(e, pvq, tmp, band_len, k, has_ref, bsize);
+
+    /* One raw bit flips the sign of the entire reference vector (once
+     * per block, when phase coding is enabled) */
+    if (pvq->phase && has_ref) {
+        const int8_t phase = daalaent_cphase(e, 1);
+        for (i = 0; i < pvq->nb_coeffs; i++)
+            ref[i] *= phase;
+        pvq->phase = 0;
+    }
+
+    if (skip) {
+        /* Skipped band: copy the reference or zero it outright */
+        if (skip == DAALAPVQ_SKIP_COPY)
+            memcpy(out, ref, band_len*sizeof(dctcoef));
+        else
+            memset(out, 0, band_len*sizeof(dctcoef));
+        return;
+    }
+    gain = daalapvq_gain_raise(qg + gain_off, pvq->qmax[band_idx], beta);
+    daalapvq_synth(out, tmp, ref, band_len, gr, has_ref, gain, theta, qmatrix,
+                   qmatrix_inv);
+}
+
+/* Decodes all PVQ bands of one block.
+ * q: quantizer, pvq_qm = bitstream_header->pvq_qm[p], bskip: bit 0 is
+ * stored into out[0] (presumably the DC skip flag — confirm with the
+ * caller), bit 1 selects decode vs. whole-block skip. */
+static inline void daalapvq_decode(DaalaEntropy *e, DaalaPVQ *pvq,
+                                   DaalaBitstreamHeader *b, dctcoef *ref,
+                                   dctcoef *out, int q, uint8_t *pvq_qm, int p,
+                                   enum DaalaBsize bsize, const double *beta,
+                                   int qm_off, int bskip)
+{
+    int i, j;
+    int bands = daala_layouts[bsize].band_offset_size;
+    const int *off = daala_layouts[bsize].band_offset;
+    uint8_t skip_rest[3] = {0};
+    out[0] = bskip & 1;
+    if (!(bskip >> 1)) {
+        /* Whole block skipped: zero on keyframes, copy reference otherwise */
+        for (i = 1; i < 1 << (2*bsize + 4); i++)
+            out[i] = b->key_frame ? 0 : ref[i];
+    } else {
+        pvq->nb_coeffs = off[bands];
+        pvq->phase = p && b->key_frame;
+        for (i = 0; i < bands; i++) {
+            pvq->size[i] = off[i+1] - off[i];
+            pvq->skip[i] = (!i && (i < bands - 1));
+            pvq->qmax[i] = FFMAX(1, q*pvq_qm[daalapvq_get_qm_idx(bsize, i + 1)] >> 4);
+        }
+        for (i = 0; i < bands; i++) {
+            daalapvq_decode_vector(e, pvq, out+off[i], ref+off[i], beta[i], b->key_frame,
+                                   p, skip_rest, 1, i, qm_off + off[i], bsize);
+            /* After the first band, one symbol selects which of the three
+             * remaining direction groups are skipped */
+            if (!i && !skip_rest[0] && bsize) {
+                int sdir = daalaent_decode_cdf_adapt(e, &pvq->pvqskip_cdf,
+                                                     !!p + 2*(bsize - 1), 7);
+                for (j = 0; j < 3; j++)
+                    skip_rest[j] = !!(sdir & (1 << j));
+            }
+        }
+    }
+}
+
+#endif /* AVCODEC_DAALAPVQ_H */
diff --git a/libavcodec/daala_utils.h b/libavcodec/daala_utils.h
new file mode 100644
index 0000000..9c3d5fe
--- /dev/null
+++ b/libavcodec/daala_utils.h
@@ -0,0 +1,202 @@
+/*
+ * Copyright 2001-2015 Xiph.Org and contributors.
+ * Copyright 2015 Rostislav Pehlivanov <atomnuker at gmail.com>
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+ * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef AVCODEC_DAALAUTIL_H
+#define AVCODEC_DAALAUTIL_H
+
+#include "daala.h"
+
+#define DAALA_BSIZE8x8(barr, bstride, bx, by) \
+ ((barr)[(by)*(bstride) + (bx)])
+#define DAALA_BSIZE4x4(barr, bstride, bx, by) \
+ DAALA_BSIZE8x8(barr, bstride, (bx) >> 1, (by) >> 1)
+
+/* Flattened index of band b's entry in the QM table for a block size. */
+static av_always_inline int daala_get_qm_idx(enum DaalaBsize bsize, int b)
+{
+    const int base = bsize*(bsize + 1);
+    return base + (b - b/3);
+}
+
+/* Each (block size, decimation) pair owns one DAALA_QM_BSIZE-sized slot. */
+static av_always_inline int daala_qm_get_offset(enum DaalaBsize bsize, int dec)
+{
+    const int slot = 2*bsize + dec;
+    return slot*DAALA_QM_BSIZE;
+}
+
+/* Gather the coefficients listed in a band layout's (x, y) table out of
+ * a strided raster buffer into contiguous coding order. Instantiated
+ * for int16_t (quantization matrices) and int32_t (dctcoef). */
+#define BAND_FROM_RASTER(type) \
+static inline void daala_band_from_raster_##type(const DaalaBandLayout *layout,\
+                                                 type *dst, const type *src, \
+                                                 int stride) \
+{ \
+    int i; \
+    for (i = 0; i < layout->tab_size; i++) { \
+        dst[i] = src[layout->tab[i][1]*stride + layout->tab[i][0]]; \
+    } \
+}
+
+BAND_FROM_RASTER(int16_t)
+BAND_FROM_RASTER(int32_t)
+
+/* Reorder a full n x n raster block into coding order: the 4x4 band
+ * first (after the DC slot), then each larger size's bands at offsets
+ * that are powers of four. dst[0] is the DC, copied last. */
+#define RASTER_TO_CODING(type) \
+    static inline void daala_raster_to_coding_##type(type *dst, int n, \
+                                                     const type *src, \
+                                                     int stride) \
+{ \
+    int bs; \
+    daala_band_from_raster_##type(&daala_layouts[0], dst + 1, src, stride); \
+    for (bs = 1; bs < DAALA_NBSIZES; bs++) { \
+        const type size = 1 << (DAALA_LOG_BSIZE0 + bs); \
+        const type offset = 1 << 2*(DAALA_LOG_BSIZE0 - 1 + bs); \
+        if (n >= size) { \
+            daala_band_from_raster_##type(&daala_layouts[bs], dst + offset, \
+                                          src, stride); \
+        } \
+    } \
+    dst[0] = src[0]; \
+}
+
+RASTER_TO_CODING(int16_t)
+RASTER_TO_CODING(int32_t)
+
+/* dctcoef entry point for the raster -> coding-order reshuffle. */
+static inline void daala_raster_to_coding(dctcoef *dst, int n, const dctcoef *src,
+                                          int stride)
+{
+    daala_raster_to_coding_int32_t(dst, n, src, stride);
+}
+
+/* Scatter a contiguous coding-order band back into a raster buffer. */
+static inline void daala_raster_from_band(const DaalaBandLayout *layout, dctcoef *dst,
+                                          int stride, const dctcoef *src)
+{
+    int idx;
+    for (idx = 0; idx < layout->tab_size; idx++) {
+        const int col = layout->tab[idx][0];
+        const int row = layout->tab[idx][1];
+        dst[row*stride + col] = src[idx];
+    }
+}
+
+/* Inverse of daala_raster_to_coding(): rebuild the raster-order block
+ * from the concatenated per-size band buffers; dst[0] gets the DC. */
+static inline void daala_coding_to_raster(dctcoef *dst, int stride, const dctcoef *src, int n)
+{
+    enum DaalaBsize bs;
+    daala_raster_from_band(&daala_layouts[0], dst, stride, src + 1);
+    for (bs = 1; bs < DAALA_NBSIZES; bs++) {
+        const int bside = 1 << (DAALA_LOG_BSIZE0 + bs);
+        const int boff  = 1 << 2*(DAALA_LOG_BSIZE0 - 1 + bs);
+        if (n < bside)
+            continue;
+        daala_raster_from_band(&daala_layouts[bs], dst, stride, src + boff);
+    }
+    dst[0] = src[0];
+}
+
+/* Build the PVQ quantization matrix and its inverse for every
+ * (block size, decimation) pair; qm_idx selects the 8x8 base matrix in
+ * daala_qmatrices[]. Both outputs are stored in coding (band) order. */
+static inline void daala_init_qmatrix(int16_t *qmatrix, int16_t *qmatrix_inv, int qm_idx)
+{
+    int i, j, dec;
+    int16_t tmp[DAALA_QM_BSIZE] = {0}, tmp_inv[DAALA_QM_BSIZE] = {0};
+    const int16_t *qm_mode = daala_qmatrices[qm_idx];
+    enum DaalaBsize bsize;
+    for (bsize = 0; bsize < DAALA_NBSIZES; bsize++) {
+        for (dec = 0; dec < 2; dec++) {
+            const int off = daala_qm_get_offset(bsize, dec);
+            int16_t *qm = qmatrix + off;
+            int16_t *qm_inv = qmatrix_inv + off;
+            for (i = 0; i < 4 << bsize; i++) {
+                for (j = 0; j < 4 << bsize; j++) {
+                    double val;
+                    double mag = 1.0;
+                    /* DC (0,0) keeps magnitude 1; every other coeff is
+                     * scaled by the basis magnitudes over the base QM */
+                    if (i || j) {
+                        mag = daala_basis_qm_mag[dec][bsize][i]*daala_basis_qm_mag[dec][bsize][j];
+                        mag /= 0.0625*qm_mode[(i << 1 >> bsize)*8 + (j << 1 >> bsize)];
+                    }
+                    val = FFMIN((double)DAALA_QM_SCALE_MAX, round(mag*DAALA_QM_SCALE));
+                    tmp[i*(4 << bsize) + j] = (int16_t)val;
+                    val = round(DAALA_QM_SCALE*DAALA_QM_INV_SCALE/val);
+                    tmp_inv[i*(4 << bsize) + j] = (int16_t)val;
+                }
+            }
+            daala_raster_to_coding_int16_t(qm, 4 << bsize, tmp, 4 << bsize);
+            daala_raster_to_coding_int16_t(qm_inv, 4 << bsize, tmp_inv, 4 << bsize);
+        }
+    }
+}
+
+/* Free the first x rows of a 2D array, then the row-pointer array.
+ * NULL-safe. Note: av_freep() NULLs only the local copy of arr; the
+ * caller's pointer is left dangling and must not be reused. */
+static inline void daala_safe_free_2d_array(void **arr, int x)
+{
+    int i;
+    if (!arr)
+        return;
+    for (i = 0; i < x; i++)
+        av_free(arr[i]);
+    av_freep(&arr);
+}
+#define DAALA_FREE_2D_ARRAY(arr, xlen)\
+    daala_safe_free_2d_array((void **)arr, xlen);
+
+/* Allocate an x-by-y 2D array of vsize-sized elements, zero-filled.
+ * On failure every successful allocation -- including the row-pointer
+ * array itself -- is freed and NULL is returned. */
+static inline void **daala_safe_alloc_2d_array(int x, int y, unsigned int vsize)
+{
+    int i;
+    void **arr = av_calloc(x, sizeof(void *));
+    if (!arr)
+        return NULL;
+    for (i = 0; i < x; i++) {
+        arr[i] = av_calloc(y, vsize);
+        if (!arr[i]) {
+            /* Frees rows [0, i) and the pointer array; freeing zero
+             * rows is a no-op, so i == 0 needs no special case (the
+             * old `if (i > 0)` guard leaked arr when row 0 failed). */
+            DAALA_FREE_2D_ARRAY(arr, i);
+            return NULL;
+        }
+    }
+    return arr;
+}
+#define DAALA_ALLOC_2D_ARRAY_GOTO(arr, xlen, ylen, vtype, label)\
+    if (!(arr = (vtype **)daala_safe_alloc_2d_array(xlen, ylen, sizeof(vtype)))) goto label;
+
+/* Free `elem` consecutive 2D arrays (each x rows wide). */
+static inline void daala_safe_free_static_2d_array(void **arr[], int elem, int x)
+{
+    int i;
+    for (i = 0; i < elem; i++)
+        DAALA_FREE_2D_ARRAY(arr[i], x);
+}
+#define DAALA_FREE_STATIC_2D_ARRAY(arr, elem, xlen)\
+    daala_safe_free_static_2d_array((void ***)arr, elem, xlen);
+
+/* Allocate `elem` separate x-by-y 2D arrays into arr[0..elem-1].
+ * Returns 0 on success; on failure every 2D array allocated so far is
+ * released and 1 is returned. */
+static inline int daala_safe_alloc_static_2d_array(void **arr[], int elem, int x,
+                                                   int y, unsigned int vsize)
+{
+    int i;
+    for (i = 0; i < elem; i++) {
+        arr[i] = daala_safe_alloc_2d_array(x, y, vsize);
+        if (!arr[i]) {
+            /* Free the i arrays that DID succeed: elements [0, i).
+             * (Previously freed from &arr[i], i.e. the failed slot plus
+             * uninitialized tail pointers -- undefined behavior.) */
+            DAALA_FREE_STATIC_2D_ARRAY(arr, i, x);
+            return 1;
+        }
+    }
+    return 0;
+}
+#define DAALA_ALLOC_STATIC_2D_ARRAY_GOTO(arr, elem, xlen, ylen, vtype, label)\
+    if (daala_safe_alloc_static_2d_array((void ***)arr, elem, xlen, ylen, sizeof(vtype))) goto label;
+
+#endif /* AVCODEC_DAALAUTIL_H */
diff --git a/libavcodec/daaladec.c b/libavcodec/daaladec.c
new file mode 100644
index 0000000..3501a6b
--- /dev/null
+++ b/libavcodec/daaladec.c
@@ -0,0 +1,804 @@
+/*
+ * Copyright 2001-2015 Xiph.Org and contributors.
+ * Copyright 2015 Rostislav Pehlivanov <atomnuker at gmail.com>
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+ * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "daala.h"
+#include "avcodec.h"
+#include "get_bits.h"
+#include "bytestream.h"
+#include "internal.h"
+
+#include "daalatab.h"
+#include "daala_entropy.h"
+#include "daala_utils.h"
+#include "daala_pvq.h"
+#include "daaladsp.h"
+
+#define DERING_SKIP 255
+
+/* Per-stream decoder state. */
+typedef struct DaalaContext {
+    DaalaSharedContext s;  /* shared context; s.h is the bitstream header */
+    DaalaEntropy e;        /* entropy (range) decoder */
+    DaalaPVQ pvq;          /* PVQ CDFs, expectation values and QMs */
+    DaalaDSP dsp;          /* transform/prediction/filter kernels */
+
+    int last_qm;           /* QM index the PVQ matrices were last built for */
+    int quantizer[DAALA_MAX_PLANES]; /* per-plane quantizer; 0 appears to mean lossless -- confirm */
+
+    /* Picture constants */
+    enum AVPictureType type;
+    int width, height; /* Scaled avctx->width/height! (padded to superblock size) */
+    const struct DaalaPixFmts *fmt;
+    int sbn_x, sbn_y; /* Number of x and y superblocks */
+
+    /* Block sizes array; offset past a one-superblock border, see init/free */
+    enum DaalaBsize *bsizes;
+    int bsizes_stride;
+
+    /* Density functions */
+    DaalaCDF skip_cdf;
+    DaalaCDF dering_cdf;
+    DaalaCDF q_scale_cdf;
+    DaalaCDF haar_coef_cdf; /* Split coef Haar CDF */
+    DaalaCDF haar_split_cdf; /* Tree Splitting Haar CDF */
+    DaalaCDF haar_bit_cdf; /* Bitstream Haar CDF */
+    DaalaCDF haar_dc_mcdf[DAALA_MAX_PLANES]; /* Haar DC lvl. CDF (gen. model) */
+
+    /* Expectation values driving the generic models above */
+    int haar_sb_ex[DAALA_MAX_PLANES];
+    int haar_dc_ex[DAALA_MAX_PLANES][DAALA_NBSIZES][3];
+
+    /* Buffers */
+    dctcoef **haar_dc_buf[DAALA_MAX_PLANES]; /* planes X sbn_x X sbn_y */
+    dctcoef *ccoef[DAALA_MAX_PLANES]; /* reconstructed samples (IDCT/IDWT output) */
+    dctcoef *dcoef[DAALA_MAX_PLANES]; /* transform-domain coefficients */
+    dctcoef *lcoef[DAALA_MAX_PLANES]; /* luma reference for chroma-from-luma */
+
+    uint8_t **dering; /* 2D array, sbn_x by sbn_y big */
+    uint8_t **q_scale;
+} DaalaContext;
+
+/* Translate the bitstream header's frame type into AVFrame fields. */
+static void set_frame_type(DaalaBitstreamHeader *b, AVFrame *frame)
+{
+    frame->key_frame = b->key_frame;
+    if (b->key_frame)
+        frame->pict_type = AV_PICTURE_TYPE_I;
+    else if (b->bipred)
+        frame->pict_type = AV_PICTURE_TYPE_B;
+    else
+        frame->pict_type = AV_PICTURE_TYPE_P;
+}
+
+/* Fetch motion vectors -- stub. Returns immediately on keyframes and
+ * currently decodes nothing for inter frames either (inter decoding is
+ * not implemented yet; see daala_decode_frame()). */
+static void get_motion_vectors(DaalaContext *s)
+{
+    if (s->s.h.key_frame)
+        return;
+}
+
+/* Decode one quantizer per plane; a code of 0 stores quantizer 0. */
+static void get_plane_quantizers(DaalaContext *s)
+{
+    int p;
+    for (p = 0; p < s->fmt->planes; p++) {
+        const int qcode = daalaent_decode_uint(&s->e, daala_quant_codemap_size);
+        int idx;
+        if (!qcode) {
+            s->quantizer[p] = 0;
+            continue;
+        }
+        /* Out-of-range codes clamp to the last table entry */
+        idx = FFMIN(qcode, daala_quant_codemap_size - 1);
+        s->quantizer[p] = daala_quant_codemap[idx] << DAALA_CSHIFT >> 4;
+    }
+}
+
+/* Running horizontal/vertical DC gradient carried down the block tree */
+typedef struct HaarGradient {
+    int x, y;
+} HaarGradient;
+
+/* Decode the Haar DC coefficient of one superblock. The DC is
+ * predicted from previously decoded neighbour superblocks, the coded
+ * residual is added, and the result is written both to the coeff plane
+ * d and to the per-plane DC buffer. lim_pass enables the wider 4-tap
+ * predictor, which reads the top-right neighbour -- the caller only
+ * sets it when y > 0 and x < sbn_x - 1, keeping dc_buf[x+1] in range. */
+static void get_haar_dc_sb(DaalaContext *s, HaarGradient *g, dctcoef *d, int x, int y,
+                           uint8_t p, uint8_t lim_pass)
+{
+    int q, q_dc;
+    int xdec = s->fmt->dec[p][0];
+    const int aw = s->width >> xdec;
+    const int ln = DAALA_LOG_BSIZE_MAX - xdec;
+    dctcoef dc_pred = 0, **dc_buf = s->haar_dc_buf[p];
+    /* DC quantizer: 1 when the plane quantizer is 0, else QM-scaled */
+    if (!s->quantizer[p])
+        q_dc = 1;
+    else
+        q_dc = FFMAX(1, s->quantizer[p]*s->s.h.pvq_qm[p][daala_get_qm_idx(DAALA_NBSIZES - 1, 0)] >> 4);
+    if (x > 0 && y > 0) {
+        if (lim_pass) { /* ALERT: coeffs could change */
+            dc_pred = 22*dc_buf[x-1][y-0] - 9*dc_buf[x-1][y-1] +
+                      15*dc_buf[x+0][y-1] + 4*dc_buf[x+1][y-1];
+        } else {
+            dc_pred = 23*dc_buf[x-1][y-0] - 10*dc_buf[x-1][y-1] + 19*dc_buf[x-0][y-1];
+        }
+        dc_pred = (dc_pred + 16) >> 5; /* weights sum to 32: /32 rounded */
+    } else {
+        /* Frame edge: plain left/top neighbour sum */
+        dc_pred += x > 0 ? dc_buf[x-1][y-0] : 0;
+        dc_pred += y > 0 ? dc_buf[x-0][y-1] : 0;
+    }
+    q = daalaent_decode_generic(&s->e, &s->haar_dc_mcdf[p], &s->haar_sb_ex[p], -1, 2);
+    q *= daalaent_cphase(&s->e, q); /* apply coded sign */
+    q = q*q_dc + dc_pred;
+    d[(y << ln)*aw + (x << ln)] = q;
+    dc_buf[x][y] = q;
+    /* Update the DC gradients that bias the sub-block decode */
+    g->x = x > 0 ? dc_buf[x-1][y-0] - q : g->x;
+    g->y = y > 0 ? dc_buf[x-0][y-1] - q : g->y;
+}
+
+/* Decode the Haar DC coefficients of a block's four children and push
+ * them down one level via a 2x2 inverse Haar step. The running DC
+ * gradient g biases the two first-order coefficients. */
+static void get_haar_dc_lvl(DaalaContext *s, HaarGradient *g, int x, int y,
+                            uint8_t p, enum DaalaBsize bsize)
+{
+    int i, q_dc, q_ac[2];
+    int xdec = s->fmt->dec[p][0];
+    const int aw = s->width >> xdec;
+    const int ln = bsize - xdec + 2;
+    dctcoef *dcoef = s->dcoef[p], coef[4];
+    if (!s->quantizer[p]) {
+        q_dc = 1;
+        q_ac[0] = 1;
+        q_ac[1] = 1;
+    } else {
+        q_dc = FFMAX(1, s->quantizer[p]*s->s.h.pvq_qm[p][daala_get_qm_idx(DAALA_NBSIZES - 1, 0)] >> 4);
+        /* q_ac[0] scales the two first-order terms, q_ac[1] the cross term */
+        q_ac[0] = (q_dc*daala_dc_comp[bsize - xdec][0] + 8) >> 4;
+        q_ac[1] = (q_dc*daala_dc_comp[bsize - xdec][1] + 8) >> 4;
+    }
+    coef[0] = dcoef[(y << ln)*aw + (x << ln)];
+    for (i = 1; i < 4; i++) {
+        int *ex = &s->haar_dc_ex[p][bsize][i-1];
+        int q = daalaent_decode_generic(&s->e, &s->haar_dc_mcdf[p], ex, -1, 2);
+        if (q) {
+            q *= daalaent_cphase(&s->e, q); /* sign, only for nonzero */
+        }
+        coef[i] = q*q_ac[i == 3];
+    }
+    g->x = coef[1] += g->x/5;
+    g->y = coef[2] += g->y/5;
+    daala_haar_kern(&coef[0], &coef[1], &coef[2], &coef[3]);
+    /* Scatter the four child DCs back into the coefficient plane */
+    dcoef[(y << ln)*aw + (x << ln)] = coef[0];
+    dcoef[(y << ln)*aw + ((x + 1) << ln)] = coef[1];
+    dcoef[((y + 1) << ln)*aw + (x << ln)] = coef[2];
+    dcoef[((y + 1) << ln)*aw + ((x + 1) << ln)] = coef[3];
+}
+
+/* Quantizer scaling -- decoded data not currently used. The context is
+ * built from the already-decoded top and left neighbour values. */
+static av_unused inline void decode_quantizer_scaling(DaalaContext *s, int x, int y,
+                                                      uint8_t skip)
+{
+    int qs = 0;
+    if (!skip) {
+        const int above = y > 0 ? s->q_scale[x][y-1] : 0;
+        const int left  = x > 0 ? s->q_scale[x-1][y] : 0;
+        qs = daalaent_decode_cdf_adapt(&s->e, &s->q_scale_cdf, above + 4*left, 4);
+    }
+    s->q_scale[x][y] = qs;
+}
+
+/* Build the prediction for one block: zeros (plus optional intra
+ * prediction) for keyframe luma or haar mode, the chroma-from-luma
+ * reference for keyframe chroma, nothing yet for inter frames. */
+static void daala_calc_prediction(DaalaContext *s, dctcoef *pred, const dctcoef *d,
+                                  int x, int y, int p, enum DaalaBsize bsize)
+{
+    const int n = 1 << (bsize + DAALA_LOG_BSIZE0);
+    const int aw = s->width >> s->fmt->dec[p][0];
+    const int off = (y << DAALA_LOG_BSIZE0)*aw + (x << DAALA_LOG_BSIZE0);
+    if (!s->s.h.key_frame) {
+        /* Copy from mv coeffs */
+        return;
+    }
+    if (!p || s->s.h.haar) {
+        memset(pred, 0, n*n*sizeof(dctcoef));
+        if (!p && !s->s.h.haar && s->dsp.intrapred)
+            s->dsp.intrapred((uint8_t *)pred, (uint8_t *)&d[off], aw, x, y,
+                             s->bsizes, s->bsizes_stride, bsize);
+    } else {
+        /* Chroma: predict from the CfL-filtered luma in lcoef */
+        const dctcoef *l = s->lcoef[p];
+        int i, j;
+        for (i = 0; i < n; i++)
+            for (j = 0; j < n; j++)
+                pred[n*i + j] = l[n*i + j];
+    }
+}
+
+/* Decode how much of a Haar coefficient sum goes to the first child.
+ * Large sums are coded as `shift` raw low bits plus an adaptively
+ * coded high part; coef_split selects between the coefficient-split
+ * and tree-split CDFs (with different context layouts). The result is
+ * clamped to sum, flagging the bitstream as broken on overshoot. */
+static inline int decode_haar_coeff_tree_split(DaalaContext *s, int sum, int ctx,
+                                               uint8_t coef_split)
+{
+    int a = 0, shift, offset;
+    DaalaCDF *cdf = coef_split ? &s->haar_coef_cdf : &s->haar_split_cdf;
+    if (!sum)
+        return 0;
+    if ((shift = FFMAX(0, daalaent_log2(sum) - 4)))
+        a = daalaent_decode_bits(&s->e, shift);
+    if (coef_split)
+        offset = 15*ctx + (sum >> shift) - 1;
+    else
+        offset = 15*(2*ctx + FFMIN(shift, 1)) + (sum >> shift) - 1;
+    a += daalaent_decode_cdf_adapt(&s->e, cdf, offset,
+                                   (sum >> shift) + 1) << shift;
+    if (a > sum) {
+        a = sum; /* corrupt stream: clamp and flag */
+        s->e.err = 1;
+    }
+    return a;
+}
+
+/* Recursively decode a Haar coefficient tree: split sum_t between the
+ * current coefficient and its four children, recursing while the
+ * children themselves have children (4*x < n), otherwise storing the
+ * four leaves directly. */
+static void decode_tree_sum(DaalaContext *s, dctcoef *pred, int x, int y,
+                            dctcoef sum_t, const int shift, const int dir)
+{
+    dctcoef c_sum, t_sum, sub[2][2], n = 1 << shift;
+    const int offset = dir + 3*(daalaent_log2(FFMAX(x,y)) - 1);
+    if (!sum_t)
+        return;
+    pred[y*n + x] = decode_haar_coeff_tree_split(s, sum_t, offset, 1);
+    if ((c_sum = sum_t - pred[y*n + x])) {
+        /* dctcoef pointers into sub[][]; dir swaps which off-diagonal
+         * child each decoded value lands in (was uint32_t *, an
+         * incompatible pointer type for the int32_t dctcoef array) */
+        dctcoef *ele_1 = &sub[!!dir][!dir], *ele_2 = &sub[!dir][!!dir];
+        t_sum = decode_haar_coeff_tree_split(s, c_sum, !!dir, 0);
+        sub[0][0] = decode_haar_coeff_tree_split(s, t_sum, 2, 0);
+        *ele_1 = t_sum - sub[0][0];
+        *ele_2 = decode_haar_coeff_tree_split(s, c_sum - t_sum, 2, 0);
+        sub[1][1] = c_sum - t_sum - *ele_2;
+    } else {
+        memset(&sub[0][0], 0, 4*sizeof(dctcoef));
+    }
+    if (4*x < n && 4*y < n) {
+        decode_tree_sum(s, pred, 2*x, 2*y, sub[0][0], shift, dir);
+        decode_tree_sum(s, pred, 2*x + 1, 2*y, sub[0][1], shift, dir);
+        decode_tree_sum(s, pred, 2*x, 2*y + 1, sub[1][0], shift, dir);
+        decode_tree_sum(s, pred, 2*x + 1, 2*y + 1, sub[1][1], shift, dir);
+        return;
+    }
+    /* Leaf level: store the four children directly */
+    pred[2*y*n + 2*x] = sub[0][0];
+    pred[2*y*n + 2*x + 1] = sub[0][1];
+    pred[(2*y + 1)*n + 2*x] = sub[1][0];
+    pred[(2*y + 1)*n + 2*x + 1] = sub[1][1];
+}
+
+/* Haar block decoding and transform: decode the total coefficient
+ * magnitude, split it over the three direction subtrees, apply signs,
+ * dequantize against the prediction and run the inverse wavelet. */
+static void decode_block_haar(DaalaContext *s, int x, int y, int p, enum DaalaBsize bsize)
+{
+    int i, j, k, l, n = 1 << (bsize + 2);
+    const int dx = x << bsize, dy = y << bsize;
+    const int aw = s->width >> s->fmt->dec[p][0];
+    const int boffset = (dy << 2)*aw + (dx << 2);
+
+    dctcoef tree[4][4];
+    dctcoef pred[DAALA_BSIZE_MAX*DAALA_BSIZE_MAX];
+    dctcoef tpred[DAALA_BSIZE_MAX*DAALA_BSIZE_MAX];
+
+    daala_calc_prediction(s, pred, s->dcoef[p], dx, dy, p, bsize);
+    memcpy(tpred, pred, n*n*sizeof(dctcoef));
+
+    /* Total magnitude: 4-bit symbol, escaped with unary + raw bits */
+    tree[0][0] = daalaent_decode_cdf_adapt(&s->e, &s->haar_bit_cdf, p, 16);
+    if (tree[0][0] == 15)
+        tree[0][0] += daalaent_decode_unary(&s->e);
+
+    if (tree[0][0] > 24) {
+        s->e.err = 1; /* implausibly large -- corrupt stream */
+        return;
+    } else if (tree[0][0] > 1) {
+        int tmp = daalaent_decode_bits(&s->e, tree[0][0] - 1);
+        tree[0][0] = (1 << (tree[0][0] - 1)) | tmp;
+    }
+
+    /* Split the sum across the three direction subtrees */
+    tree[1][1] = decode_haar_coeff_tree_split(s, tree[0][0], 3, 0);
+    tree[0][1] = decode_haar_coeff_tree_split(s, tree[0][0] - tree[1][1], 4, 0);
+    tree[1][0] = tree[0][0] - tree[1][1] - tree[0][1];
+
+    decode_tree_sum(s, pred, 1, 0, tree[0][1], bsize + 2, 0);
+    decode_tree_sum(s, pred, 0, 1, tree[1][0], bsize + 2, 1);
+    decode_tree_sum(s, pred, 1, 1, tree[1][1], bsize + 2, 2);
+
+    /* Signs for every nonzero coefficient except the DC slot */
+    for (i = 0; i < n; i++) {
+        for (j = (i == 0); j < n; j++)
+            pred[i*n + j] *= daalaent_cphase(&s->e, pred[i*n + j]);
+    }
+    /* Dequantize each level of each direction and add the prediction */
+    for (i = 0; i < 3; i++) { /* Direction */
+        for (j = 0; j < bsize+2; j++) { /* Level */
+            int bo = (((i + 1) >> 1) << j)*n + (((i + 1) & 1) << j);
+            int q = !s->quantizer[p] ? 1 : s->quantizer[p]*daala_haar_qm[i == 2][j] >> 4;
+            for (k = 0; k < 1 << j; k++)
+                for (l = 0; l < 1 << j; l++)
+                    pred[bo + k*n + l] = q*pred[bo + k*n + l] + tpred[bo + k*n + l];
+        }
+    }
+
+    /* DC level was decoded earlier by the haar-DC pass */
+    pred[0] = s->dcoef[p][boffset];
+
+    for (i = 0; i < n; i++) {
+        for (j = 0; j < n; j++)
+            s->dcoef[p][boffset + i*aw + j] = pred[i*n + j];
+    }
+
+    /* IDWT */
+    if (s->dsp.idwt)
+        s->dsp.idwt((uint8_t *)(s->ccoef[p] + boffset), aw,
+                    (uint8_t *)(s->dcoef[p] + boffset), aw, bsize+2);
+}
+
+/* Initialize a skipped block: on keyframes zero every AC coefficient
+ * (d[0], the DC, is left untouched); otherwise copy the prediction. */
+static void init_skipped(dctcoef *d, int ostride, dctcoef *pred, int dstride,
+                         int key_frame)
+{
+    int row, col;
+    const int n = dstride;
+    for (row = 0; row < n; row++) {
+        for (col = 0; col < n; col++) {
+            if (key_frame) {
+                if (row || col)
+                    d[row*ostride + col] = 0;
+            } else {
+                d[row*ostride + col] = pred[row*n + col];
+            }
+        }
+    }
+}
+
+/* PVQ decoding and transform for one block: build the prediction,
+ * reorder it to coding order, decode the PVQ bands against it, restore
+ * raster order and inverse-transform into the sample buffer. */
+static inline void decode_block_pvq(DaalaContext *s, int x, int y, int p,
+                                    enum DaalaBsize bsize, uint8_t skip)
+{
+    const int sx = x << bsize;
+    const int sy = y << bsize;
+    const int n = 1 << (bsize + 2);
+    const int aw = s->width >> s->fmt->dec[p][0];
+    const int boffset = (sy << 2)*aw + (sx << 2);
+    const int qm = FFMAX(1, s->quantizer[p]);
+    const int qm_off = daala_qm_get_offset(bsize, s->fmt->dec[p][0]);
+    const double *beta = daala_pvq_beta[s->s.h.act_mask][p][bsize];
+
+    dctcoef *d = s->dcoef[p];
+    dctcoef pred[DAALA_BSIZE_MAX*DAALA_BSIZE_MAX];
+    dctcoef tpred[DAALA_BSIZE_MAX*DAALA_BSIZE_MAX];
+
+    daala_calc_prediction(s, pred, d, sx, sy, p, bsize);
+
+    init_skipped(&d[boffset], aw, pred, n, s->s.h.key_frame);
+
+    /* tpred = prediction in coding (band) order */
+    daala_raster_to_coding(tpred, n, pred, n);
+
+    daalapvq_decode(&s->e, &s->pvq, &s->s.h, tpred, pred, qm, s->s.h.pvq_qm[p],
+                    p, bsize, beta, qm_off, skip);
+
+    /* DC comes from the haar-DC pass, not from PVQ */
+    pred[0] = d[boffset];
+
+    daala_coding_to_raster(&d[boffset], aw, pred, n);
+
+    /* IDCT */
+    if (s->dsp.idct[bsize])
+        s->dsp.idct[bsize]((uint8_t *)(s->ccoef[p] + boffset), aw,
+                           (uint8_t *)(s->dcoef[p] + boffset), aw);
+}
+
+/* Recursively segment and decode one block. For luma in non-haar mode
+ * a skip/split symbol decides whether the block is coded at this size
+ * or split into four children; haar mode never splits. Returns 0. */
+static inline int decode_block_rec(DaalaContext *s, HaarGradient g, int x, int y,
+                                   uint8_t p, enum DaalaBsize bsize)
+{
+    int i, j, lc_skip, cbs;
+    const int sx = x << bsize;
+    const int sy = y << bsize;
+    const int llim = 1 << bsize;
+    const int off = 2*bsize + !!p;
+    const int bst = s->bsizes_stride;
+    const int xdec = s->fmt->dec[p][0];
+    const int aw = s->width >> xdec;
+    enum DaalaBsize obsize = DAALA_BSIZE4x4(s->bsizes, bst, sx, sy);
+
+    if (s->s.h.haar) {
+        obsize = bsize;
+    } else if (!p) {
+        lc_skip = daalaent_decode_cdf_adapt(&s->e, &s->skip_cdf, off,
+                                            4 + (bsize > 0));
+        if (bsize == DAALA_NBSIZES - 1) {
+            //s->dering[x][y] = (lc_skip == 2 && !s->s.h.key_frame) ? DERING_SKIP : 0;
+            //decode_quantizer_scaling(s, x, y, lc_skip == 0);
+        }
+        obsize = lc_skip < 4 ? bsize : -1; /* lc_skip == 4: split further */
+    }
+
+    if ((cbs = FFMAX(obsize, xdec)) == bsize) {
+        cbs -= xdec;
+        if (!p) {
+            /* Record the chosen size over the whole 4x4-grid area */
+            for (i = 0; i < llim; i++)
+                for (j = 0; j < llim; j++)
+                    DAALA_BSIZE4x4(s->bsizes, bst, sx + i, sy + j) = bsize;
+        }
+        if (s->dsp.cfl) {
+            /* Build the chroma-from-luma reference in lcoef */
+            s->dsp.cfl((uint8_t *)s->lcoef[p], 1 << (cbs + DAALA_LOG_BSIZE0),
+                       (uint8_t *)(s->dcoef[0] + (y << (2 + bsize))*s->width + (x << (2 + bsize))),
+                       s->width, xdec, s->fmt->dec[p][1], cbs, obsize);
+        }
+        if (p && !s->s.h.haar) {
+            lc_skip = daalaent_decode_cdf_adapt(&s->e, &s->skip_cdf, off, 4);
+        }
+        if (s->s.h.haar)
+            decode_block_haar(s, x, y, p, cbs);
+        else
+            decode_block_pvq(s, x, y, p, cbs, lc_skip);
+        /* Skip dering using lc_skip here!! */
+    } else {
+        int bs = bsize - s->fmt->dec[p][0];
+        int bo = (y << (DAALA_LOG_BSIZE0 + bs))*aw + (x << (DAALA_LOG_BSIZE0 + bs));
+        int hfilter = (x + 1) << (DAALA_LOG_BSIZE0 + bs) <= s->width;
+        int vfilter = (y + 1) << (DAALA_LOG_BSIZE0 + bs) <= s->height;
+        /* Fixed: was `s->ccoef + bo`, which indexed the array of plane
+         * pointers instead of plane p's samples (cf. the matching
+         * post_split_filter call below). */
+        if (!s->s.h.key_frame && s->dsp.pre_split_filter)
+            s->dsp.pre_split_filter((uint8_t *)(s->ccoef[p] + bo), aw, bs, hfilter, vfilter);
+        if (s->s.h.key_frame)
+            get_haar_dc_lvl(s, &g, 2*x, 2*y, p, bsize - 1);
+        decode_block_rec(s, g, 2*x + 0, 2*y + 0, p, bsize - 1);
+        decode_block_rec(s, g, 2*x + 1, 2*y + 0, p, bsize - 1);
+        decode_block_rec(s, g, 2*x + 0, 2*y + 1, p, bsize - 1);
+        decode_block_rec(s, g, 2*x + 1, 2*y + 1, p, bsize - 1);
+        if (s->dsp.post_split_filter)
+            s->dsp.post_split_filter((uint8_t *)(s->ccoef[p] + bo), aw, bs, hfilter, vfilter);
+    }
+
+    return 0;
+}
+
+/* Walk every superblock in raster order and recursively decode each
+ * plane, seeding the keyframe Haar DC first. */
+static void decode_blocks(DaalaContext *s, AVFrame *frame)
+{
+    int sbx, sby, p;
+    for (sby = 0; sby < s->sbn_y; sby++) {
+        for (sbx = 0; sbx < s->sbn_x; sbx++) {
+            for (p = 0; p < s->fmt->planes; p++) {
+                HaarGradient g = {0};
+                if (s->s.h.key_frame)
+                    get_haar_dc_sb(s, &g, s->dcoef[p], sbx, sby, p,
+                                   sby > 0 && sbx < s->sbn_x - 1);
+                decode_block_rec(s, g, sbx, sby, p, DAALA_NBSIZES - 1);
+            }
+        }
+    }
+}
+
+/* Applies post-decode filtering: the per-plane frame postfilter, then
+ * decodes the per-superblock deringing decisions.
+ * NOTE(review): the dering flags are decoded (keeping the entropy
+ * stream in sync) but no dering filter is applied yet -- confirm. */
+static void apply_filtering(DaalaContext *s, AVFrame *frame)
+{
+    int p, x, y;
+    for (p = 0; p < s->fmt->planes; p++) {
+        int xdec = s->fmt->dec[p][0];
+        int ydec = s->fmt->dec[p][1];
+        int aw = s->width >> xdec;
+        if (!s->s.h.haar && s->dsp.frame_postfilter) {
+            s->dsp.frame_postfilter((uint8_t *)s->ccoef[p], aw, s->sbn_x, s->sbn_y, xdec, ydec);
+        }
+    }
+    for (x = 0; x < s->sbn_x; x++) {
+        for (y = 0; y < s->sbn_y; y++) {
+            uint8_t u, l, res;
+            /* Superblocks flagged DERING_SKIP carry no coded decision */
+            if (s->dering[x][y] == DERING_SKIP) {
+                s->dering[x][y] = 0;
+                continue;
+            }
+            u = y > 0 ? s->dering[x][y-1] << 1 : 0;
+            l = x > 0 ? s->dering[x-1][y] << 0 : 0;
+            res = daalaent_decode_cdf_adapt(&s->e, &s->dering_cdf, u + l, 2);
+            s->dering[x][y] = res;
+        }
+    }
+}
+
+/* Reset every adaptive CDF, generic model and expectation value to its
+ * initial state; called once per frame before entropy decoding. */
+static inline void reset_cdfs(DaalaContext *s, DaalaBitstreamHeader *h)
+{
+    int i, j, k;
+    const int pvq_ex_const = 2 << 16;
+
+    /* CDFs */
+    daalaent_cdf_reset(&s->dering_cdf);
+    daalaent_cdf_reset(&s->q_scale_cdf);
+    daalaent_cdf_reset(&s->haar_bit_cdf);
+    daalaent_cdf_reset(&s->haar_split_cdf);
+    daalaent_cdf_reset(&s->skip_cdf);
+    daalaent_cdf_reset(&s->haar_coef_cdf);
+
+    /* PVQ CDFs */
+    daalaent_cdf_reset(&s->pvq.pvqcodeword_cdf);
+    daalaent_cdf_reset(&s->pvq.pvqskip_cdf);
+    daalaent_cdf_reset(&s->pvq.pvqtheta_gain_cdf);
+    daalaent_cdf_reset(&s->pvq.pvqtheta_mcdf);
+    daalaent_cdf_reset(&s->pvq.pvqgain_ref_mcdf);
+    daalaent_cdf_reset(&s->pvq.pvqgain_noref_mcdf);
+
+    /* Theta expectations differ between key and inter frames */
+    for (i = 0; i < DAALA_NBSIZES*DAALAPVQ_PARTITIONS_MAX; i++)
+        s->pvq.pvqtheta_ex[i] = h->key_frame ? 24576 : pvq_ex_const;
+
+    /* Per-bsize codeword adaptation state */
+    for (i = 0; i < 2*DAALA_NBSIZES; i++) {
+        s->pvq.pvqadapt[4*i + DAALAENT_PVQ_K] = 384;
+        s->pvq.pvqadapt[4*i + DAALAENT_PVQ_SUM_EX] = 256;
+        s->pvq.pvqadapt[4*i + DAALAENT_PVQ_COUNT] = 104;
+        s->pvq.pvqadapt[4*i + DAALAENT_PVQ_COUNT_EX] = 128;
+    }
+
+    /* DC MCDFs + expectation values (luma starts higher than chroma) */
+    for (i = 0; i < s->fmt->planes; i++) {
+        daalaent_cdf_reset(&s->haar_dc_mcdf[i]);
+        s->haar_sb_ex[i] = i > 0 ? 8 : 32768;
+        for (j = 0; j < DAALA_NBSIZES; j++) {
+            for (k = 0; k < 3; k++)
+                s->haar_dc_ex[i][j][k] = i > 0 ? 8 : 32768;
+            for (k = 0; k < DAALAPVQ_PARTITIONS_MAX; k++)
+                s->pvq.pvqgain_ex[i][j][k] = pvq_ex_const;
+        }
+    }
+}
+
+//#define DEBUGGING
+
+#ifdef DEBUGGING
+#include "libavutil/ffversion.h"
+#include "libavutil/internal.h"
+#include "libavutil/xga_font_data.h"
+#include "libavutil/pixdesc.h"
+#include "cga_data.h"
+#include <sys/time.h>
+
+/* Format into sbuf, draw the string, advance dst1 one 10-pixel text
+ * row. Relies on sbuf/dst1/frame being in the caller's scope.
+ * NOTE(review): multi-statement macro without do { } while (0), and
+ * the final backslash continues onto the following blank line. */
+#define PRINT_OSD_DEBUG(format, ...) \
+    snprintf(sbuf, sizeof(sbuf), format, ##__VA_ARGS__); \
+    print_debug_info(dst1, frame->linesize[0], sbuf); \
+    dst1 += frame->linesize[0] * 10; \
+
+/* Draw one debug string at dst1 using the built-in 8x8 CGA font. */
+static void print_debug_info(uint8_t *dst1, int linesize, const char *str)
+{
+    size_t i;
+    const size_t len = strlen(str); /* hoisted: was re-evaluated per iteration */
+    for (i = 0; i < len; i++) {
+        ff_draw_pc_font(dst1 + (i+1)*8, linesize,
+                        avpriv_cga_font, 8, str[i], 255, 1);
+    }
+}
+#endif
+
+/* Decode one packet into one frame. Returns the number of bytes
+ * consumed from the packet, or a negative AVERROR code. */
+static int daala_decode_frame(AVCodecContext *avctx, void *data,
+                              int *got_frame, AVPacket *avpkt)
+{
+    DaalaContext *s = avctx->priv_data;
+    AVFrame *frame = data;
+    const uint8_t *buf = avpkt->data;
+    const int buf_size = avpkt->size;
+    int ret, p, i, j;
+
+    if ((ret = ff_get_buffer(avctx, frame, 0)) < 0)
+        return ret;
+
+#ifdef DEBUGGING
+    long long unsigned dec_time;
+    struct timeval tv1, tv2;
+    char sbuf[50];
+    uint8_t *dst1 = frame->data[0] + frame->linesize[0] * 8;
+    gettimeofday(&tv1, NULL);
+#endif
+
+    /* Init the entropy decoding context */
+    daalaent_decode_init(&s->e, buf, buf_size);
+
+    /* Read the frame miniheader */
+    if (daalaent_decode_frame_header(&s->e, &s->s.h, s->fmt->planes))
+        return AVERROR_INVALIDDATA;
+
+    /* Update AVFrame type */
+    set_frame_type(&s->s.h, frame);
+
+    /* (Re)build the PVQ quantization matrix when the QM index changes */
+    if (s->last_qm != s->s.h.qm) {
+        daala_init_qmatrix(s->pvq.qmatrix, s->pvq.qmatrix_inv, s->s.h.qm);
+        s->last_qm = s->s.h.qm;
+    }
+
+    /* Prepare the CDFs and MCDFs for use */
+    reset_cdfs(s, &s->s.h);
+
+    /* TODO: Actually read the MVs */
+    get_motion_vectors(s);
+
+    /* Get per-plane quantizers (all identical for now) */
+    get_plane_quantizers(s);
+
+    /* NOTE: Remove once inter frames are decoded */
+    if (s->s.h.key_frame) {
+        /* Recursively decode all blocks */
+        decode_blocks(s, frame);
+
+        /* Apply frame postfilter */
+        apply_filtering(s, frame);
+    }
+
+    /* Copy the reconstructed planes to the frame, shifting back down
+     * from coefficient precision and re-centering around 128 */
+    for (p = 0; p < s->fmt->planes; p++) {
+        dctcoef *src = s->ccoef[p];
+        uint8_t *dst = frame->data[p];
+        int stride = s->width >> s->fmt->dec[p][0];
+        int r_w = avctx->width >> s->fmt->dec[p][0];
+        int r_h = avctx->height >> s->fmt->dec[p][1];
+        int cs = !s->quantizer[p] ? (s->fmt->depth_mode - 1)*2 : DAALA_CSHIFT;
+        for (i = 0; i < r_h; i++) {
+            for (j = 0; j < r_w; j++)
+                dst[j] = av_clip_uint8((src[j] + (1 << cs >> 1) >> cs) + 128);
+            dst += frame->linesize[p];
+            src += stride;
+        }
+    }
+
+#ifdef DEBUGGING
+    gettimeofday(&tv2, NULL);
+    dec_time = 1000*(tv2.tv_sec - tv1.tv_sec) + (tv2.tv_usec - tv1.tv_usec)/1000;
+    PRINT_OSD_DEBUG("FFmpeg Daala decoder: %s", FFMPEG_VERSION)
+    PRINT_OSD_DEBUG("Padded size: %i %i", s->width, s->height);
+    PRINT_OSD_DEBUG("Superblocks: %i, %i %i", 4 << (DAALA_NBSIZES-1), s->sbn_x, s->sbn_y);
+    PRINT_OSD_DEBUG("Format: %s", av_get_pix_fmt_name(s->fmt->fmt))
+    /* Fixed: pts/dts are int64_t; "%li" is UB where long is 32-bit */
+    PRINT_OSD_DEBUG("PTS: %"PRId64" DTS: %"PRId64, avpkt->pts, avpkt->dts)
+    PRINT_OSD_DEBUG("Packet length: %0.2f kb", (avpkt->size)*0.001f)
+    PRINT_OSD_DEBUG("Decoding time: %llu msec", dec_time)
+    PRINT_OSD_DEBUG("Keyframe: %i", s->s.h.key_frame)
+    PRINT_OSD_DEBUG("Num. references: %i", s->s.h.ref_num)
+    PRINT_OSD_DEBUG("Plane quantizers: %i %i %i", s->quantizer[0], s->quantizer[1], s->quantizer[2])
+    PRINT_OSD_DEBUG("Activity masking: %i", s->s.h.key_frame)
+    PRINT_OSD_DEBUG("Quantization matrix: %s", s->s.h.qm ? "HVS" : "Flat")
+    PRINT_OSD_DEBUG("Haar wavelet: %i", s->s.h.haar)
+    PRINT_OSD_DEBUG("Golden boost: %i", s->s.h.golden)
+    PRINT_OSD_DEBUG("Bitstream error: %i", s->e.err)
+#endif
+
+    if (s->e.err)
+        av_log(avctx, AV_LOG_WARNING, "Bitstream error!\n");
+
+    *got_frame = 1;
+
+    return daalaent_bits_count(&s->e) >> 3;
+}
+
+/* Free all decoder state; also used as the error path of init (at the
+ * points it is reached from there, fmt and bsizes are always set). */
+static av_cold int daala_decode_free(AVCodecContext *avctx)
+{
+    int i;
+    DaalaContext *s = avctx->priv_data;
+
+    /* Blocksizes array: undo the one-superblock border offset applied
+     * in daala_decode_init() before freeing the real base pointer */
+    s->bsizes -= DAALA_BSIZE_GRID*s->bsizes_stride + DAALA_BSIZE_GRID;
+    av_freep(&s->bsizes);
+
+    /* CDF */
+    daalaent_cdf_free(&s->dering_cdf);
+    daalaent_cdf_free(&s->q_scale_cdf);
+    daalaent_cdf_free(&s->haar_bit_cdf);
+    daalaent_cdf_free(&s->haar_split_cdf);
+    daalaent_cdf_free(&s->skip_cdf);
+    daalaent_cdf_free(&s->haar_coef_cdf);
+    for (i = 0; i < s->fmt->planes; i++)
+        daalaent_cdf_free(&s->haar_dc_mcdf[i]);
+
+    /* PVQ CDFs */
+    daalaent_cdf_free(&s->pvq.pvqcodeword_cdf);
+    daalaent_cdf_free(&s->pvq.pvqskip_cdf);
+    daalaent_cdf_free(&s->pvq.pvqtheta_gain_cdf);
+    daalaent_cdf_free(&s->pvq.pvqtheta_mcdf);
+    daalaent_cdf_free(&s->pvq.pvqgain_ref_mcdf);
+    daalaent_cdf_free(&s->pvq.pvqgain_noref_mcdf);
+
+    /* Coefs */
+    for (i = 0; i < s->fmt->planes; i++) {
+        av_freep(&s->ccoef[i]);
+        av_freep(&s->dcoef[i]);
+        av_freep(&s->lcoef[i]);
+    }
+
+    /* 2(.5)D tables */
+    DAALA_FREE_2D_ARRAY(s->dering, s->sbn_x);
+    DAALA_FREE_2D_ARRAY(s->q_scale, s->sbn_x);
+    DAALA_FREE_STATIC_2D_ARRAY(s->haar_dc_buf, s->fmt->planes, s->sbn_x);
+
+    return 0;
+}
+
+/* One-time decoder setup: pick the pixel format, size the superblock
+ * grid, allocate all CDFs and coefficient buffers, init the DSP. */
+static av_cold int daala_decode_init(AVCodecContext *avctx)
+{
+    int i, err = 0;
+    DaalaContext *s = avctx->priv_data;
+
+    /* Inits the QM, the decode_frame function will reinit it if it's not HVS */
+    s->last_qm = 1;
+    daala_init_qmatrix(s->pvq.qmatrix, s->pvq.qmatrix_inv, s->last_qm);
+
+    /* Pad the coded size up to a whole number of superblocks */
+    s->width = FFALIGN(avctx->width, DAALA_BSIZE_MAX);
+    s->height = FFALIGN(avctx->height, DAALA_BSIZE_MAX);
+
+    s->fmt = daala_find_p_format(avctx->pix_fmt);
+    if (!s->fmt) {
+        av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format %i!\n", avctx->pix_fmt);
+        return AVERROR(ENOTSUP);
+    }
+    s->sbn_x = s->width >> DAALA_LOG_BSIZE_MAX;
+    s->sbn_y = s->height >> DAALA_LOG_BSIZE_MAX;
+
+    /* Block sizes array, with a one-superblock border on every side;
+     * the stored pointer is offset past the top-left border (undone in
+     * daala_decode_free()) */
+    s->bsizes_stride = (s->sbn_x + 2)*DAALA_BSIZE_GRID;
+    s->bsizes = av_malloc(sizeof(enum DaalaBsize)*s->bsizes_stride*(s->sbn_y + 2)*DAALA_BSIZE_GRID);
+    if (!s->bsizes)
+        return AVERROR(ENOMEM);
+    s->bsizes += DAALA_BSIZE_GRID*s->bsizes_stride + DAALA_BSIZE_GRID;
+
+    /* CDF init */
+    err |= daalaent_cdf_alloc(&s->dering_cdf, 4, 2, 128, 0, 2, 0);
+    err |= daalaent_cdf_alloc(&s->q_scale_cdf, 8, 4, 128, 0, 2, 0);
+    err |= daalaent_cdf_alloc(&s->haar_bit_cdf, 3, 16, 128, 0, 2, 0);
+    err |= daalaent_cdf_alloc(&s->haar_split_cdf, 15*2*5, 16, 128, 0, 2, 0);
+    err |= daalaent_cdf_alloc(&s->skip_cdf, DAALA_NBSIZES*2, 5, 128, 0, 2, 0);
+    err |= daalaent_cdf_alloc(&s->haar_coef_cdf, 15*3*(DAALA_NBSIZES+1), 16, 128, 0, 2, 0);
+    for (i = 0; i < s->fmt->planes; i++)
+        err |= daalaent_cdf_alloc(&s->haar_dc_mcdf[i], DAALAENT_MODEL_TAB, 16, 64, 0, 0, 1);
+
+    /* PVQ CDFs */
+    err |= daalaent_cdf_alloc(&s->pvq.pvqcodeword_cdf, 4, 16, 128, 0, 0, 0);
+    err |= daalaent_cdf_alloc(&s->pvq.pvqskip_cdf, 2*(DAALA_NBSIZES-1), 7, 128, 0, 2, 0);
+    err |= daalaent_cdf_alloc(&s->pvq.pvqtheta_gain_cdf, 2*DAALA_NBSIZES*DAALAPVQ_PARTITIONS_MAX, 16, 128, 0, 2, 0);
+    err |= daalaent_cdf_alloc(&s->pvq.pvqtheta_mcdf, DAALAENT_MODEL_TAB, 16, 64, 0, 0, 1);
+    err |= daalaent_cdf_alloc(&s->pvq.pvqgain_ref_mcdf, DAALAENT_MODEL_TAB, 16, 64, 0, 0, 1);
+    err |= daalaent_cdf_alloc(&s->pvq.pvqgain_noref_mcdf, DAALAENT_MODEL_TAB, 16, 64, 0, 0, 1);
+
+    if (err)
+        goto alloc_fail;
+
+    /* Arrays */
+    DAALA_ALLOC_2D_ARRAY_GOTO(s->dering, s->sbn_x, s->sbn_y, uint8_t, alloc_fail);
+    DAALA_ALLOC_2D_ARRAY_GOTO(s->q_scale, s->sbn_x, s->sbn_y, uint8_t, alloc_fail);
+    DAALA_ALLOC_STATIC_2D_ARRAY_GOTO(s->haar_dc_buf, s->fmt->planes, s->sbn_x,
+                                     s->sbn_y, dctcoef, alloc_fail);
+
+    /* Coeffs - GET RID OF THOSE! */
+    for (i = 0; i < s->fmt->planes; i++) {
+        int r_w = s->width >> s->fmt->dec[i][0];
+        int r_h = s->height >> s->fmt->dec[i][1];
+        s->lcoef[i] = av_malloc(r_w*r_h*sizeof(dctcoef));
+        s->dcoef[i] = av_malloc(r_w*r_h*sizeof(dctcoef));
+        s->ccoef[i] = av_malloc(r_w*r_h*sizeof(dctcoef));
+        if (!s->dcoef[i] || !s->lcoef[i] || !s->ccoef[i])
+            goto alloc_fail;
+    }
+
+    if ((err = daaladsp_init(&s->dsp, s->fmt->depth))) {
+        /* Everything above is already allocated -- release it instead
+         * of leaking it (the old code returned err directly) */
+        daala_decode_free(avctx);
+        return err;
+    }
+
+    return 0;
+
+alloc_fail:
+    av_log(avctx, AV_LOG_ERROR, "Failed to allocate memory!\n");
+    daala_decode_free(avctx);
+    return AVERROR(ENOMEM);
+}
+
+AVCodec ff_daala_decoder = {
+    .name = "daala",
+    .long_name = NULL_IF_CONFIG_SMALL("Daala"),
+    .type = AVMEDIA_TYPE_VIDEO,
+    .id = AV_CODEC_ID_DAALA,
+    .priv_data_size = sizeof(DaalaContext),
+    .init = daala_decode_init,
+    .close = daala_decode_free,
+    .decode = daala_decode_frame,
+    /* NOTE(review): no .capabilities set (e.g. CODEC_CAP_DR1) -- confirm
+     * whether direct rendering / threading support is intended */
+};
diff --git a/libavcodec/daaladsp.c b/libavcodec/daaladsp.c
new file mode 100644
index 0000000..ba03520
--- /dev/null
+++ b/libavcodec/daaladsp.c
@@ -0,0 +1,2123 @@
+/*
+ * Copyright 2001-2015 Xiph.Org and contributors.
+ * Copyright 2015 Rostislav Pehlivanov <atomnuker at gmail.com>
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+ * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "daaladsp.h"
+#include "daalatab.h"
+#include "daala_utils.h"
+
+#define pixel int32_t
+
+#define DAALA_DCT_RSHIFT(_a, _b) (((pixel)(((uint32_t)(_a) >> (32 - (_b))) + (_a))) >> (_b))
+
+/* Inverse Haar wavelet transform of an ln-level square block.
+ * _dst/_src actually point to pixel (int32_t) data; the uint8_t * signature
+ * presumably matches a generic DSP function-pointer type -- confirm against
+ * daaladsp.h.  Strides are in pixel units (pointers are indexed directly
+ * after the cast), not bytes.
+ * Reconstruction runs from the coarsest level outward: dst[0] is seeded with
+ * the DC value, then each level recombines one low-pass sample (read back
+ * from dst) with three subband samples from src into a 2x2 block of the next
+ * finer level.  The descending i/j iteration order is what keeps the
+ * in-place dst reads/writes from clobbering each other.
+ * Fix: src is only ever read, so keep it const instead of casting the
+ * qualifier away. */
+static void daala_haar_iwt(uint8_t *_dst, const int dst_stride,
+ const uint8_t *_src, const int src_stride,
+ const int ln)
+{
+ int i, j, p, lvl;
+ pixel *dst = (pixel *)_dst;
+ const pixel *src = (const pixel *)_src;
+ dst[0] = src[0];
+ for (lvl = ln - 1; lvl > -1; lvl--) {
+ p = 1 << (ln - 1 - lvl); /* subband dimension at this level */
+ for (i = p - 1; i > -1; i--) {
+ for (j = p - 1; j > -1; j--) {
+ const pixel a = src[(i + 0)*src_stride + j + p];
+ const pixel b = src[(i + p)*src_stride + j + 0];
+ const pixel c = src[(i + p)*src_stride + j + p] - a;
+ const pixel d = dst[(i + 0)*dst_stride + j + 0] + b;
+ const pixel e = (d - c) >> 1;
+ const pixel f = e - a;
+ const pixel g = e - b;
+ dst[(2*i + 1)*dst_stride + 2*j + 0] = f;
+ dst[(2*i + 0)*dst_stride + 2*j + 1] = g;
+ dst[(2*i + 0)*dst_stride + 2*j + 0] = d - f;
+ dst[(2*i + 1)*dst_stride + 2*j + 1] = c + g;
+ }
+ }
+ }
+}
+
+/* 4-point inverse DCT-II along one dimension: reads y[0..3], writes four
+ * outputs at stride xstride.  Integer lifting steps; the constants appear to
+ * be fixed-point trig approximations (cf. the commented OD_IDCT_* macros
+ * below).  Statement order is significant -- values are updated in place,
+ * do not reorder. */
+static av_always_inline void idct_1D_4(pixel *x, int xstride, const pixel y[4])
+{
+ int t2h, t0 = y[0], t1 = y[1], t2 = y[2], t3 = y[3];
+ t3 += (t1*18293 + 8192) >> 14;
+ t1 -= (t3*21407 + 16384) >> 15;
+ t3 += (t1*23013 + 16384) >> 15;
+ t2 = t0 - t2;
+ t2h = DAALA_DCT_RSHIFT(t2, 1);
+ t0 -= t2h - DAALA_DCT_RSHIFT(t3, 1);
+ t1 = t2h - t1;
+ *(x + 0*xstride) = (pixel)t0;
+ *(x + 1*xstride) = (pixel)(t2 - t1);
+ *(x + 2*xstride) = (pixel)t1;
+ *(x + 3*xstride) = (pixel)(t0 - t3);
+}
+
+/* 8-point inverse DCT-II along one dimension: reads y[0..7], writes eight
+ * outputs at stride xstride.  In-place integer lifting network; statement
+ * order is significant, do not reorder.
+ * Fix: the parameter was declared y[16], but only y[0]..y[7] are read and
+ * the sibling transforms declare y[4]/y[16]/y[32] to match their size.
+ * Declared y[8] for consistency; array parameter sizes are documentation
+ * only in C, so behavior is unchanged. */
+static av_always_inline void idct_1D_8(pixel *x, int xstride, const pixel y[8])
+{
+ int t1h, t4h, t6h, t0 = y[0], t1 = y[1], t2 = y[2], t3 = y[3], t4 = y[4];
+ int t5 = y[5], t6 = y[6], t7 = y[7];
+ t5 -= (t3*2485 + 4096) >> 13;
+ t3 += (t5*18205 + 16384) >> 15;
+ t5 -= (t3*2485 + 4096) >> 13;
+ t7 -= (t1*3227 + 16384) >> 15;
+ t1 += (t7*6393 + 16384) >> 15;
+ t7 -= (t1*3227 + 16384) >> 15;
+ t1 += t3;
+ t1h = DAALA_DCT_RSHIFT(t1, 1);
+ t3 = t1h - t3;
+ t5 += t7;
+ t7 = DAALA_DCT_RSHIFT(t5, 1) - t7;
+ t3 += (t5*7489 + 4096) >> 13;
+ t5 -= (t3*11585 + 8192) >> 14;
+ t3 -= (t5*19195 + 16384) >> 15;
+ t6 += (t2*21895 + 16384) >> 15;
+ t2 -= (t6*15137 + 8192) >> 14;
+ t6 += (t2*21895 + 16384) >> 15;
+ t0 += (t4*13573 + 16384) >> 15;
+ t4 -= (t0*11585 + 8192) >> 14;
+ t0 += (t4*13573 + 16384) >> 15;
+ t4 = t2 - t4;
+ t4h = DAALA_DCT_RSHIFT(t4, 1);
+ t2 = t4h - t2;
+ t6 = t0 - t6;
+ t6h = DAALA_DCT_RSHIFT(t6, 1);
+ t0 -= t6h;
+ t7 = t6h - t7;
+ t6 -= t7;
+ t2 += DAALA_DCT_RSHIFT(t3, 1);
+ t3 = t2 - t3;
+ t5 += t4h;
+ t4 -= t5;
+ t0 += t1h;
+ t1 = t0 - t1;
+ *(x + 0*xstride) = (pixel)t0;
+ *(x + 1*xstride) = (pixel)t4;
+ *(x + 2*xstride) = (pixel)t2;
+ *(x + 3*xstride) = (pixel)t6;
+ *(x + 4*xstride) = (pixel)t7;
+ *(x + 5*xstride) = (pixel)t3;
+ *(x + 6*xstride) = (pixel)t5;
+ *(x + 7*xstride) = (pixel)t1;
+}
+
+/* 16-point inverse DCT-II along one dimension: reads y[0..15], writes
+ * sixteen outputs at stride xstride.  Fully unrolled in-place integer
+ * lifting network (ta..tf are indices 10..15 in hex naming); statement
+ * order is significant, do not reorder. */
+static av_always_inline void idct_1D_16(pixel *x, int xstride, const pixel y[16])
+{
+ int tfh, tbh, t1h, tdh, t8h, tch, tah, t2h;
+ int t0 = y[0], t1 = y[1], t2 = y[2], t3 = y[3], t4 = y[4], t5 = y[5];
+ int t6 = y[6], t7 = y[7], t8 = y[8], t9 = y[9], ta = y[10], tb = y[11];
+ int tc = y[12], td = y[13], te = y[14], tf = y[15];
+ t1 += (tf*13573 + 16384) >> 15;
+ tf -= (t1*11585 + 8192) >> 14;
+ t1 += ((tf*13573 + 16384) >> 15)+t7;
+ td -= (t3*10947 + 8192) >> 14;
+ t3 += (td*15137 + 8192) >> 14;
+ t5 += (tb*10947 + 8192) >> 14;
+ tb -= (t5*15137 + 8192) >> 14;
+ t5 += (tb*10947 + 8192) >> 14;
+ td += t5 - ((t3*21895 + 16384) >> 15);
+ tf = t9 - tf;
+ tb += t3;
+ tfh = DAALA_DCT_RSHIFT(tf, 1);
+ t9 -= tfh;
+ tbh = DAALA_DCT_RSHIFT(tb, 1);
+ t3 += tfh - tbh;
+ t1h = DAALA_DCT_RSHIFT(t1, 1);
+ t7 = t1h - t7 + tbh;
+ tdh = DAALA_DCT_RSHIFT(td, 1);
+ t5 += t1h - tdh;
+ t9 = tdh - t9;
+ td -= t9;
+ tf = t3 - tf;
+ t1 -= t5 + ((tf*20055 + 16384) >> 15);
+ tf += (t1*23059 + 8192) >> 14;
+ t1 -= (tf*5417 + 4096) >> 13;
+ tb = t7 - tb;
+ t9 += (t7*14101 + 8192) >> 14;
+ t7 += (t9*3363 + 4096) >> 13;
+ t9 -= (t7*12905 + 8192) >> 14;
+ tb -= (td*4379 + 8192) >> 14;
+ td += (tb*20435 + 8192) >> 14;
+ tb -= (td*17515 + 16384) >> 15;
+ t3 += (t5*851 + 4096) >> 13;
+ t5 += (t3*14699 + 8192) >> 14;
+ t3 -= (t5*1035 + 1024) >> 11;
+ t6 -= (ta*7335 + 16384) >> 15;
+ ta -= (t6*12873 + 8192) >> 14;
+ te += (t2*2873 + 1024) >> 11;
+ t2 += (te*9041 + 16384) >> 15;
+ t6 = DAALA_DCT_RSHIFT(t2, 1) - t6 - ((ta*8593 + 8192) >> 14);
+ te = DAALA_DCT_RSHIFT(ta, 1) - te + ((t2*2275 + 1024) >> 11);
+ t2 -= t6;
+ ta -= te;
+ t6 -= (ta*13573 + 16384) >> 15;
+ ta += (t6*11585 + 8192) >> 14;
+ t6 -= (ta*13573 + 16384) >> 15;
+ tc += (t4*9147 + 4096) >> 13;
+ t4 -= (tc*10703 + 8192) >> 14;
+ tc += (t4*23013 + 16384) >> 15;
+ t8 = t0 - t8;
+ t8h = DAALA_DCT_RSHIFT(t8, 1);
+ t0 -= t8h - DAALA_DCT_RSHIFT(tc, 1);
+ t4 = t8h - t4;
+ t8 += t6 - t4;
+ tc = t0 - tc + te;
+ ta = t4 - ta;
+ t2 = t0 - t2;
+ tch = DAALA_DCT_RSHIFT(tc, 1);
+ te = tch - te;
+ tah = DAALA_DCT_RSHIFT(ta, 1);
+ t4 -= tah;
+ t8h = DAALA_DCT_RSHIFT(t8, 1);
+ t6 = t8h - t6;
+ t2h = DAALA_DCT_RSHIFT(t2, 1);
+ t0 -= t2h;
+ t3 = t2h - t3;
+ t6 += DAALA_DCT_RSHIFT(td, 1);
+ t9 = tah - t9;
+ te += DAALA_DCT_RSHIFT(tf, 1);
+ t1 = tch - t1;
+ t4 += DAALA_DCT_RSHIFT(t7, 1);
+ tb = t8h - tb;
+ t0 += DAALA_DCT_RSHIFT(t5, 1);
+ *(x + 0*xstride) = (pixel)t0;
+ *(x + 1*xstride) = (pixel)(t8 - tb);
+ *(x + 2*xstride) = (pixel)t4;
+ *(x + 3*xstride) = (pixel)(tc - t1);
+ *(x + 4*xstride) = (pixel)te;
+ *(x + 5*xstride) = (pixel)(ta - t9);
+ *(x + 6*xstride) = (pixel)t6;
+ *(x + 7*xstride) = (pixel)(t2 - t3);
+ *(x + 8*xstride) = (pixel)t3;
+ *(x + 9*xstride) = (pixel)(t6 - td);
+ *(x + 10*xstride) = (pixel)t9;
+ *(x + 11*xstride) = (pixel)(te - tf);
+ *(x + 12*xstride) = (pixel)t1;
+ *(x + 13*xstride) = (pixel)(t4 - t7);
+ *(x + 14*xstride) = (pixel)tb;
+ *(x + 15*xstride) = (pixel)tv;
+}
+
+/* In-place 2-point iDCT building block: t0/t1 are lvalue macro arguments,
+ * both read and written.  Three-step (tan, sin, tan) integer lifting
+ * rotation; constants documented inline. */
+#define OD_IDCT_2(t0, t1) \
+ /* Embedded 2-point orthonormal Type-II iDCT. */ \
+ do { \
+ /* 3393/8192 ~= Tan[pi/8] ~= 0.414213562373095 */ \
+ t0 += (t1*3393 + 4096) >> 13; \
+ /* 5793/8192 ~= Sin[pi/4] ~= 0.707106781186547 */ \
+ t1 -= (t0*5793 + 4096) >> 13; \
+ /* 13573/32768 ~= Tan[pi/8] ~= 0.414213562373095 */ \
+ t0 += (t1*13573 + 16384) >> 15; \
+ } \
+ while (0)
+
+/* In-place 2-point iDST building block; same three-step lifting rotation
+ * shape as OD_IDCT_2 with Type-IV constants. */
+#define OD_IDST_2(t0, t1) \
+ /* Embedded 2-point orthonormal Type-IV iDST. */ \
+ do { \
+ /* 10947/16384 ~= Tan[3*Pi/16]) ~= 0.668178637919299 */ \
+ t0 += (t1*10947 + 8192) >> 14; \
+ /* 473/512 ~= Sin[3*Pi/8] ~= 0.923879532511287 */ \
+ t1 -= (t0*473 + 256) >> 9; \
+ /* 10947/16384 ~= Tan[3*Pi/16] ~= 0.668178637919299 */ \
+ t0 += (t1*10947 + 8192) >> 14; \
+ } \
+ while (0)
+
+/* 4-point asymmetric iDCT stage built from the 2-point kernels.  t1h/t3h
+ * are pure outputs (half values) that larger stages consume; all other
+ * arguments are read/written in place. */
+#define OD_IDCT_4_ASYM(t0, t2, t1, t1h, t3, t3h) \
+ /* Embedded 4-point asymmetric Type-II iDCT. */ \
+ do { \
+ OD_IDST_2(t3, t2); \
+ OD_IDCT_2(t0, t1); \
+ t1 = t2 - t1; \
+ t1h = DAALA_DCT_RSHIFT(t1, 1); \
+ t2 = t1h - t2; \
+ t3 = t0 - t3; \
+ t3h = DAALA_DCT_RSHIFT(t3, 1); \
+ t0 -= t3h; \
+ } \
+ while (0)
+
+/* 4-point asymmetric iDST stage.  t0h is a pure output (half value of the
+ * updated t0) consumed by the caller; remaining arguments are updated in
+ * place.  Lifting-step order is significant, do not reorder. */
+#define OD_IDST_4_ASYM(t0, t0h, t2, t1, t3) \
+ /* Embedded 4-point asymmetric Type-IV iDST. */ \
+ do { \
+ /* 8757/16384 ~= Tan[5*Pi/32] ~= 0.534511135950792 */ \
+ t1 -= (t2*8757 + 8192) >> 14; \
+ /* 6811/8192 ~= Sin[5*Pi/16] ~= 0.831469612302545 */ \
+ t2 += (t1*6811 + 4096) >> 13; \
+ /* 8757/16384 ~= Tan[5*Pi/32] ~= 0.534511135950792 */ \
+ t1 -= (t2*8757 + 8192) >> 14; \
+ /* 6723/8192 ~= Tan[7*Pi/32] ~= 0.820678790828660 */ \
+ t3 -= (t0*6723 + 4096) >> 13; \
+ /* 8035/8192 ~= Sin[7*Pi/16] ~= 0.980785280403230 */ \
+ t0 += (t3*8035 + 4096) >> 13; \
+ /* 6723/8192 ~= Tan[7*Pi/32] ~= 0.820678790828660 */ \
+ t3 -= (t0*6723 + 4096) >> 13; \
+ t0 += t2; \
+ t0h = DAALA_DCT_RSHIFT(t0, 1); \
+ t2 = t0h - t2; \
+ t1 += t3; \
+ t3 -= DAALA_DCT_RSHIFT(t1, 1); \
+ /* -19195/32768 ~= Tan[Pi/8] - Tan[Pi/4] ~= -0.585786437626905 */ \
+ t1 -= (t2*19195 + 16384) >> 15; \
+ /* 11585/16384 ~= Sin[Pi/4] ~= 0.707106781186548 */ \
+ t2 -= (t1*11585 + 8192) >> 14; \
+ /* 7489/8192 ~= Tan[Pi/8] + Tan[Pi/4]/2 ~= 0.914213562373095 */ \
+ t1 += (t2*7489 + 4096) >> 13; \
+ } \
+ while (0)
+
+
+/* 8-point orthonormal iDCT stage composed from the 4-point asymmetric
+ * kernels; local half values (t1h_/t3h_/t7h_) stay internal.  All eight
+ * arguments are updated in place. */
+#define OD_IDCT_8(t0, t4, t2, t6, t1, t5, t3, t7) \
+ /* Embedded 8-point orthonormal Type-II iDCT. */ \
+ do { \
+ int t1h_; \
+ int t3h_; \
+ int t7h_; \
+ OD_IDST_4_ASYM(t7, t7h_, t5, t6, t4); \
+ OD_IDCT_4_ASYM(t0, t2, t1, t1h_, t3, t3h_); \
+ t4 = t3h_ - t4; \
+ t3 -= t4; \
+ t2 += DAALA_DCT_RSHIFT(t5, 1); \
+ t5 = t2 - t5; \
+ t6 = t1h_ - t6; \
+ t1 -= t6; \
+ t0 += t7h_; \
+ t7 = t0 - t7; \
+ } \
+ while (0)
+
+/* 8-point orthonormal iDST stage; all eight arguments updated in place,
+ * scratch half values declared locally.  The t7 = -t7 sign flip is part of
+ * the transform (see the TODO left by the original authors). */
+#define OD_IDST_8(t0, t4, t2, t6, t1, t5, t3, t7) \
+ /* Embedded 8-point orthonormal Type-IV iDST. */ \
+ do { \
+ int t0h; \
+ int t2h; \
+ int t5h_; \
+ int t7h_; \
+ /* 11725/32768 ~= Tan[7*Pi/64] ~= 0.357805721314524 */ \
+ t1 += (t6*11725 + 16384) >> 15; \
+ /* 5197/8192 ~= Sin[7*Pi/32] ~= 0.634393284163645 */ \
+ t6 -= (t1*5197 + 4096) >> 13; \
+ /* 11725/32768 ~= Tan[7*Pi/64] ~= 0.357805721314524 */ \
+ t1 += (t6*11725 + 16384) >> 15; \
+ /* 2455/4096 ~= Tan[11*Pi/64] ~= 0.599376933681924 */ \
+ t2 += (t5*2455 + 2048) >> 12; \
+ /* 7225/8192 ~= Sin[11*Pi/32] ~= 0.881921264348355 */ \
+ t5 -= (t2*7225 + 4096) >> 13; \
+ /* 2455/4096 ~= Tan[11*Pi/64] ~= 0.599376933681924 */ \
+ t2 += (t5*2455 + 2048) >> 12; \
+ /* 4861/32768 ~= Tan[3*Pi/64] ~= 0.148335987538347 */ \
+ t3 += (t4*4861 + 16384) >> 15; \
+ /* 1189/4096 ~= Sin[3*Pi/32] ~= 0.290284677254462 */ \
+ t4 -= (t3*1189 + 2048) >> 12; \
+ /* 4861/32768 ~= Tan[3*Pi/64] ~= 0.148335987538347 */ \
+ t3 += (t4*4861 + 16384) >> 15; \
+ /* 7425/8192 ~= Tan[15*Pi/64] ~= 0.906347169019147 */ \
+ t0 += (t7*7425 + 4096) >> 13; \
+ /* 8153/8192 ~= Sin[15*Pi/32] ~= 0.995184726672197 */ \
+ t7 -= (t0*8153 + 4096) >> 13; \
+ /* 7425/8192 ~= Tan[15*Pi/64] ~= 0.906347169019147 */ \
+ t0 += (t7*7425 + 4096) >> 13; \
+ /* TODO: Can we move this into another operation */ \
+ t7 = -t7; \
+ t7 -= t6; \
+ t7h_ = DAALA_DCT_RSHIFT(t7, 1); \
+ t6 += t7h_; \
+ t2 -= t3; \
+ t2h = DAALA_DCT_RSHIFT(t2, 1); \
+ t3 += t2h; \
+ t0 += t1; \
+ t0h = DAALA_DCT_RSHIFT(t0, 1); \
+ t1 -= t0h; \
+ t5 = t4 - t5; \
+ t5h_ = DAALA_DCT_RSHIFT(t5, 1); \
+ t4 -= t5h_; \
+ t1 += t5h_; \
+ t5 = t1 - t5; \
+ t3 -= t0h; \
+ t0 += t3; \
+ t6 += t2h; \
+ t2 = t6 - t2; \
+ t4 += t7h_; \
+ t7 -= t4; \
+ /* 3259/16384 ~= Tan[Pi/16] ~= 0.198912367379658 */ \
+ t1 += (t6*3259 + 8192) >> 14; \
+ /* 3135/8192 ~= Sin[Pi/8] ~= 0.382683432365090 */ \
+ t6 -= (t1*3135 + 4096) >> 13; \
+ /* 3259/16384 ~= Tan[Pi/16] ~= 0.198912367379658 */ \
+ t1 += (t6*3259 + 8192) >> 14; \
+ /* 10947/16384 ~= Tan[3*Pi/16] ~= 0.668178637919299 */ \
+ t5 += (t2*10947 + 8192) >> 14; \
+ /* 15137/16384 ~= Sin[3*Pi/8] ~= 0.923879532511287 */ \
+ t2 -= (t5*15137 + 8192) >> 14; \
+ /* 21895/32768 ~= Tan[3*Pi/16] ~= 0.668178637919299 */ \
+ t5 += (t2*21895 + 16384) >> 15; \
+ /* 13573/32768 ~= Tan[Pi/8] ~= 0.414213562373095 */ \
+ t3 += (t4*13573 + 16384) >> 15; \
+ /* 11585/16384 ~= Sin[Pi/4] ~= 0.707106781186547 */ \
+ t4 -= (t3*11585 + 8192) >> 14; \
+ /* 13573/32768 ~= Tan[Pi/8] ~= 0.414213562373095 */ \
+ t3 += (t4*13573 + 16384) >> 15; \
+ } \
+ while (0)
+
+/* 16-point asymmetric iDCT stage: runs the 8-point iDST/iDCT kernels on the
+ * two halves, then butterflies them.  The *h arguments (t1h..tfh) are pure
+ * outputs -- half values consumed by OD_IDCT_32. */
+#define OD_IDCT_16_ASYM(t0, t8, t4, tc, t2, ta, t6, te, \
+t1, t1h, t9, t9h, t5, t5h, td, tdh, t3, t3h, tb, tbh, t7, t7h, tf, tfh) \
+/* Embedded 16-point asymmetric Type-II iDCT. */ \
+do { \
+ OD_IDST_8(tf, tb, td, t9, te, ta, tc, t8); \
+ OD_IDCT_8(t0, t4, t2, t6, t1, t5, t3, t7); \
+ t1 -= te; \
+ t1h = DAALA_DCT_RSHIFT(t1, 1); \
+ te += t1h; \
+ t9 = t6 - t9; \
+ t9h = DAALA_DCT_RSHIFT(t9, 1); \
+ t6 -= t9h; \
+ t5 -= ta; \
+ t5h = DAALA_DCT_RSHIFT(t5, 1); \
+ ta += t5h; \
+ td = t2 - td; \
+ tdh = DAALA_DCT_RSHIFT(td, 1); \
+ t2 -= tdh; \
+ t3 -= tc; \
+ t3h = DAALA_DCT_RSHIFT(t3, 1); \
+ tc += t3h; \
+ tb = t4 - tb; \
+ tbh = DAALA_DCT_RSHIFT(tb, 1); \
+ t4 -= tbh; \
+ t7 -= t8; \
+ t7h = DAALA_DCT_RSHIFT(t7, 1); \
+ t8 += t7h; \
+ tf = t0 - tf; \
+ tfh = DAALA_DCT_RSHIFT(tf, 1); \
+ t0 -= tfh; \
+ } \
+ while (0)
+
+/* 16-point asymmetric iDST stage.  t0h/t2h/teh are pure outputs (half
+ * values consumed by OD_IDCT_32); all other arguments are updated in place.
+ * The runs of sign flips (t1 = -t1; ...) are part of the transform, as the
+ * authors' TODOs note.  Extremely order-sensitive -- do not reorder. */
+#define OD_IDST_16_ASYM(t0, t0h, t8, t4, tc, t2, t2h, ta, t6, te, teh, \
+t1, t9, t5, td, t3, tb, t7, tf) \
+/* Embedded 16-point asymmetric Type-IV iDST. */ \
+do { \
+ int t1h_; \
+ int t3h_; \
+ int t4h; \
+ int t6h; \
+ int t9h_; \
+ int tbh_; \
+ int tch; \
+ /* 8247/16384 ~= Tan[19*Pi/128] ~= 0.503357699799294 */ \
+ t6 += (t9*8247 + 8192) >> 14; \
+ /* 1645/2048 ~= Sin[19*Pi/64] ~= 0.803207531480645 */ \
+ t9 -= (t6*1645 + 1024) >> 11; \
+ /* 8247/16384 ~= Tan[19*Pi/128] ~= 0.503357699799294 */ \
+ t6 += (t9*8247 + 8192) >> 14; \
+ /* 10375/16384 ~= Tan[23*Pi/128] ~= 0.633243016177569 */ \
+ t2 += (td*10375 + 8192) >> 14; \
+ /* 7405/8192 ~= Sin[23*Pi/64] ~= 0.903989293123443 */ \
+ td -= (t2*7405 + 4096) >> 13; \
+ /* 10375/16384 ~= Tan[23*Pi/128] ~= 0.633243016177569 */ \
+ t2 += (td*10375 + 8192) >> 14; \
+ /* 11539/16384 ~= Tan[25*Pi/128] ~= 0.704279460865044 */ \
+ tc += (t3*11539 + 8192) >> 14; \
+ /* 7713/8192 ~= Sin[25*Pi/64] ~= 0.941544065183021 */ \
+ t3 -= (tc*7713 + 4096) >> 13; \
+ /* 11539/16384 ~= Tan[25*Pi/128] ~= 0.704279460865044 */ \
+ tc += (t3*11539 + 8192) >> 14; \
+ /* 9281/16384 ~= Tan[21*Pi/128] ~= 0.566493002730344 */ \
+ ta += (t5*9281 + 8192) >> 14; \
+ /* 7027/8192 ~= Sin[21*Pi/64] ~= 0.857728610000272 */ \
+ t5 -= (ta*7027 + 4096) >> 13; \
+ /* 9281/16384 ~= Tan[21*Pi/128] ~= 0.566493002730344 */ \
+ ta += (t5*9281 + 8192) >> 14; \
+ /* 6393/8192 ~= Tan[27*Pi/128] ~= 0.780407659653944 */ \
+ t4 += (tb*6393 + 4096) >> 13; \
+ /* 3973/4096 ~= Sin[27*Pi/64] ~= 0.970031253194544 */ \
+ tb -= (t4*3973 + 2048) >> 12; \
+ /* 6393/8192 ~= Tan[27*Pi/128] ~= 0.780407659653944 */ \
+ t4 += (tb*6393 + 4096) >> 13; \
+ /* 7263/16384 ~= Tan[17*Pi/128] ~= 0.443269513890864 */ \
+ te += (t1*7263 + 8192) >> 14; \
+ /* 3035/4096 ~= Sin[17*Pi/64] ~= 0.740951125354959 */ \
+ t1 -= (te*3035 + 2048) >> 12; \
+ /* 14525/32768 ~= Tan[17*Pi/128] ~= 0.443269513890864 */ \
+ te += (t1*14525 + 16384) >> 15; \
+ /* 2417/32768 ~= Tan[3*Pi/128] ~= 0.0737644315224493 */ \
+ t8 -= (t7*2417 + 16384) >> 15; \
+ /* 601/4096 ~= Sin[3*Pi/64] ~= 0.146730474455362 */ \
+ t7 += (t8*601 + 2048) >> 12; \
+ /* 2417/32768 ~= Tan[3*Pi/128] ~= 0.0737644315224493 */ \
+ t8 -= (t7*2417 + 16384) >> 15; \
+ /* 7799/8192 ~= Tan[31*Pi/128] ~= 0.952079146700925 */ \
+ t0 += (tf*7799 + 4096) >> 13; \
+ /* 4091/4096 ~= Sin[31*Pi/64] ~= 0.998795456205172 */ \
+ tf -= (t0*4091 + 2048) >> 12; \
+ /* 7799/8192 ~= Tan[31*Pi/128] ~= 0.952079146700925 */ \
+ t0 += (tf*7799 + 4096) >> 13; \
+ /* TODO: Can we move these into another operation */ \
+ t1 = -t1; \
+ t3 = -t3; \
+ t5 = -t5; \
+ t9 = -t9; \
+ tb = -tb; \
+ td = -td; \
+ tf = -tf; \
+ t4 += ta; \
+ t4h = DAALA_DCT_RSHIFT(t4, 1); \
+ ta = t4h - ta; \
+ tb -= t5; \
+ tbh_ = DAALA_DCT_RSHIFT(tb, 1); \
+ t5 += tbh_; \
+ tc += t2; \
+ tch = DAALA_DCT_RSHIFT(tc, 1); \
+ t2 -= tch; \
+ t3 -= td; \
+ t3h_ = DAALA_DCT_RSHIFT(t3, 1); \
+ td += t3h_; \
+ t9 += t8; \
+ t9h_ = DAALA_DCT_RSHIFT(t9, 1); \
+ t8 -= t9h_; \
+ t6 -= t7; \
+ t6h = DAALA_DCT_RSHIFT(t6, 1); \
+ t7 += t6h; \
+ t1 += tf; \
+ t1h_ = DAALA_DCT_RSHIFT(t1, 1); \
+ tf -= t1h_; \
+ te -= t0; \
+ teh = DAALA_DCT_RSHIFT(te, 1); \
+ t0 += teh; \
+ ta += t9h_; \
+ t9 = ta - t9; \
+ t5 -= t6h; \
+ t6 += t5; \
+ td = teh - td; \
+ te = td - te; \
+ t2 = t1h_ - t2; \
+ t1 -= t2; \
+ t7 += t4h; \
+ t4 -= t7; \
+ t8 -= tbh_; \
+ tb += t8; \
+ t0 += tch; \
+ tc -= t0; \
+ tf -= t3h_; \
+ t3 += tf; \
+ /* TODO: Can we move this into another operation */ \
+ ta = -ta; \
+ /* 6723/8192 ~= Tan[7*Pi/32] ~= 0.820678790828660 */ \
+ td += (t2*6723 + 4096) >> 13; \
+ /* 16069/16384 ~= Sin[7*Pi/16] ~= 0.980785280403230 */ \
+ t2 -= (td*16069 + 8192) >> 14; \
+ /* 6723/8192 ~= Tan[7*Pi/32] ~= 0.820678790828660 */ \
+ td += (t2*6723 + 4096) >> 13; \
+ /* 2485/8192 ~= Tan[3*Pi/32] ~= 0.303346683607342 */ \
+ t5 -= (ta*2485 + 4096) >> 13; \
+ /* 18205/32768 ~= Sin[3*Pi/16] ~= 0.555570233019602 */ \
+ ta += (t5*18205 + 16384) >> 15; \
+ /* 2485/8192 ~= Tan[3*Pi/32] ~= 0.303346683607342 */ \
+ t5 -= (ta*2485 + 4096) >> 13; \
+ t2 += t5; \
+ t2h = DAALA_DCT_RSHIFT(t2, 1); \
+ t5 -= t2h; \
+ ta = td - ta; \
+ td -= DAALA_DCT_RSHIFT(ta, 1); \
+ /* 13573/16384 ~= 2*Tan[Pi/8] ~= 0.828427124746190 */ \
+ ta -= (t5*13573 + 8192) >> 14; \
+ /* 11585/32768 ~= Sin[Pi/4]/2 ~= 0.353553390593274 */ \
+ t5 += (ta*11585 + 16384) >> 15; \
+ /* 13573/16384 ~= 2*Tan[Pi/8] ~= 0.828427124746190 */ \
+ ta -= (t5*13573 + 8192) >> 14; \
+ /* 17515/32768 ~= Tan[5*Pi/32] ~= 0.534511135950792 */ \
+ t9 -= (t6*17515 + 16384) >> 15; \
+ /* 13623/16384 ~= Sin[5*Pi/16] ~= 0.831469612302545 */ \
+ t6 += (t9*13623 + 8192) >> 14; \
+ /* 17515/32768 ~= Tan[5*Pi/32]) ~= 0.534511135950792 */ \
+ t9 -= (t6*17515 + 16384) >> 15; \
+ /* 6723/8192 ~= Tan[7*Pi/32]) ~= 0.820678790828660 */ \
+ t1 -= (te*6723 + 4096) >> 13; \
+ /* 16069/16384 ~= Sin[7*Pi/16] ~= 0.980785280403230 */ \
+ te += (t1*16069 + 8192) >> 14; \
+ /* 6723/8192 ~= Tan[7*Pi/32]) ~= 0.820678790828660 */ \
+ t1 -= (te*6723 + 4096) >> 13; \
+ te += t6; \
+ teh = DAALA_DCT_RSHIFT(te, 1); \
+ t6 = teh - t6; \
+ t9 += t1; \
+ t1 -= DAALA_DCT_RSHIFT(t9, 1); \
+ /* -19195/32768 ~= Tan[Pi/8] - Tan[Pi/4] ~= -0.585786437626905 */ \
+ t9 -= (t6*19195 + 16384) >> 15; \
+ /* 11585/16384 ~= Sin[Pi/4] ~= 0.707106781186548 */ \
+ t6 -= (t9*11585 + 8192) >> 14; \
+ /* 7489/8192 ~= Tan[Pi/8] + Tan[Pi/4]/2 ~= 0.914213562373095 */ \
+ t9 += (t6*7489 + 4096) >> 13; \
+ tb = tc - tb; \
+ tc = DAALA_DCT_RSHIFT(tb, 1) - tc; \
+ t3 += t4; \
+ t4 = DAALA_DCT_RSHIFT(t3, 1) - t4; \
+ /* TODO: Can we move this into another operation */ \
+ t3 = -t3; \
+ t8 += tf; \
+ tf = DAALA_DCT_RSHIFT(t8, 1) - tf; \
+ t0 += t7; \
+ t0h = DAALA_DCT_RSHIFT(t0, 1); \
+ t7 = t0h - t7; \
+ /* 4161/16384 ~= Tan[3*Pi/16] - Tan[Pi/8] ~= 0.253965075546204 */ \
+ t3 += (tc*4161 + 8192) >> 14; \
+ /* 15137/16384 ~= Sin[3*Pi/8] ~= 0.923879532511287 */ \
+ tc -= (t3*15137 + 8192) >> 14; \
+ /* 14341/16384 ~= Tan[3*Pi/16] + Tan[Pi/8]/2 ~= 0.875285419105846 */ \
+ t3 += (tc*14341 + 8192) >> 14; \
+ /* 14341/16384 ~= Tan[3*Pi/16] + Tan[Pi/8]/2 ~= 0.875285419105846 */ \
+ t4 -= (tb*14341 + 8192) >> 14; \
+ /* 15137/16384 ~= Sin[3*Pi/8] ~= 0.923879532511287 */ \
+ tb += (t4*15137 + 8192) >> 14; \
+ /* 4161/16384 ~= Tan[3*Pi/16] - Tan[Pi/8] ~= 0.253965075546204 */ \
+ t4 -= (tb*4161 + 8192) >> 14; \
+ /* 13573/16384 ~= 2*Tan[Pi/8] ~= 0.828427124746190 */ \
+ t8 += (t7*13573 + 8192) >> 14; \
+ /* 11585/32768 ~= Sin[Pi/4]/2 ~= 0.353553390593274 */ \
+ t7 -= (t8*11585 + 16384) >> 15; \
+ /* 13573/16384 ~= 2*Tan[Pi/8] ~= 0.828427124746190 */ \
+ t8 += (t7*13573 + 8192) >> 14; \
+ /* TODO: Can we move these into another operation */ \
+ t1 = -t1; \
+ t5 = -t5; \
+ t9 = -t9; \
+ tb = -tb; \
+ td = -td; \
+ } \
+ while (0)
+
+/* Top-level 32-point orthonormal iDCT: runs the 16-point asymmetric
+ * iDST/iDCT stages on the two halves, then butterflies the 32 values using
+ * the half values (t*h) those stages exported.  All 32 arguments (t0..tv,
+ * base-32 digit naming) are updated in place; used only by idct_1D_32. */
+#define OD_IDCT_32(t0, tg, t8, to, t4, tk, tc, ts, t2, ti, ta, tq, t6, tm, \
+te, tu, t1, th, t9, tp, t5, tl, td, tt, t3, tj, tb, tr, t7, tn, tf, tv) \
+/* Embedded 32-point orthonormal Type-II iDCT. */ \
+do { \
+ int t1h; \
+ int t3h; \
+ int t5h; \
+ int t7h; \
+ int t9h; \
+ int tbh; \
+ int tdh; \
+ int tfh; \
+ int thh; \
+ int tth; \
+ int tvh; \
+ OD_IDST_16_ASYM(tv, tvh, tn, tr, tj, tt, tth, tl, tp, th, thh, \
+ tu, tm, tq, ti, ts, tk, to, tg); \
+ OD_IDCT_16_ASYM(t0, t8, t4, tc, t2, ta, t6, te, \
+ t1, t1h, t9, t9h, t5, t5h, td, tdh, t3, t3h, tb, tbh, t7, t7h, tf, tfh); \
+ tu = t1h - tu; \
+ t1 -= tu; \
+ te += thh; \
+ th = te - th; \
+ tm = t9h - tm; \
+ t9 -= tm; \
+ t6 += DAALA_DCT_RSHIFT(tp, 1); \
+ tp = t6 - tp; \
+ tq = t5h - tq; \
+ t5 -= tq; \
+ ta += DAALA_DCT_RSHIFT(tl, 1); \
+ tl = ta - tl; \
+ ti = tdh - ti; \
+ td -= ti; \
+ t2 += tth; \
+ tt = t2 - tt; \
+ ts = t3h - ts; \
+ t3 -= ts; \
+ tc += DAALA_DCT_RSHIFT(tj, 1); \
+ tj = tc - tj; \
+ tk = tbh - tk; \
+ tb -= tk; \
+ t4 += DAALA_DCT_RSHIFT(tr, 1); \
+ tr = t4 - tr; \
+ to = t7h - to; \
+ t7 -= to; \
+ t8 += DAALA_DCT_RSHIFT(tn, 1); \
+ tn = t8 - tn; \
+ tg = tfh - tg; \
+ tf -= tg; \
+ t0 += tvh; \
+ tv = t0 - tv; \
+ } \
+ while (0)
+
+/* 32-point inverse DCT-II along one dimension: loads y[0..31] into the
+ * permuted t* variables OD_IDCT_32 expects, runs the transform, then stores
+ * the results in natural order at stride xstride. */
+static av_always_inline void idct_1D_32(pixel *x, int xstride, const pixel y[32])
+{
+ int t0 = y[0], tg = y[1], t8 = y[2], to = y[3], t4 = y[4], tk = y[5];
+ int tc = y[6], ts = y[7], t2 = y[8], ti = y[9], ta = y[10], tq = y[11];
+ int t6 = y[12], tm = y[13], te = y[14], tu = y[15], t1 = y[16], th = y[17];
+ int t9 = y[18], tp = y[19], t5 = y[20], tl = y[21], td = y[22], tt = y[23];
+ int t3 = y[24], tj = y[25], tb = y[26], tr = y[27], t7 = y[28], tn = y[29];
+ int tf = y[30], tv = y[31];
+ OD_IDCT_32(t0, tg, t8, to, t4, tk, tc, ts, t2, ti, ta, tq, t6, tm, te, tu,
+ t1, th, t9, tp, t5, tl, td, tt, t3, tj, tb, tr, t7, tn, tf, tv);
+ x[0*xstride] = (pixel)t0;
+ x[1*xstride] = (pixel)t1;
+ x[2*xstride] = (pixel)t2;
+ x[3*xstride] = (pixel)t3;
+ x[4*xstride] = (pixel)t4;
+ x[5*xstride] = (pixel)t5;
+ x[6*xstride] = (pixel)t6;
+ x[7*xstride] = (pixel)t7;
+ x[8*xstride] = (pixel)t8;
+ x[9*xstride] = (pixel)t9;
+ x[10*xstride] = (pixel)ta;
+ x[11*xstride] = (pixel)tb;
+ x[12*xstride] = (pixel)tc;
+ x[13*xstride] = (pixel)td;
+ x[14*xstride] = (pixel)te;
+ x[15*xstride] = (pixel)tf;
+ x[16*xstride] = (pixel)tg;
+ x[17*xstride] = (pixel)th;
+ x[18*xstride] = (pixel)ti;
+ x[19*xstride] = (pixel)tj;
+ x[20*xstride] = (pixel)tk;
+ x[21*xstride] = (pixel)tl;
+ x[22*xstride] = (pixel)tm;
+ x[23*xstride] = (pixel)tn;
+ x[24*xstride] = (pixel)to;
+ x[25*xstride] = (pixel)tp;
+ x[26*xstride] = (pixel)tq;
+ x[27*xstride] = (pixel)tr;
+ x[28*xstride] = (pixel)ts;
+ x[29*xstride] = (pixel)tt;
+ x[30*xstride] = (pixel)tu;
+ x[31*xstride] = (pixel)tv;
+}
+
+/* In-place 2-point asymmetric iDST lifting rotation; constants documented
+ * inline.  p0/p1 are lvalue macro arguments, both read and written. */
+#define OD_IDST_2_ASYM(p0, p1) \
+/* Embedded 2-point asymmetric Type-IV iDST. */ \
+do { \
+ /* 4573/4096 ~= 4*Sin[Pi/8] - Tan[Pi/8] ~= 1.11652016708726 */ \
+ p0 += (p1*4573 + 2048) >> 12; \
+ /* 669/1024 ~= Cos[Pi/8]/Sqrt[2] ~= 0.653281482438188 */ \
+ p1 -= (p0*669 + 512) >> 10; \
+ /* 11507/16384 ~= 4*Sin[Pi/8] - 2*Tan[Pi/8] ~= 0.702306604714169 */ \
+ p0 += (p1*11507 + 8192) >> 14; \
+ } \
+ while (0)
+
+/* In-place 2-point asymmetric iDCT butterfly; p1h is a pure output (half
+ * value of the updated p1) consumed by the caller. */
+#define OD_IDCT_2_ASYM(p0, p1, p1h) \
+/* Embedded 2-point asymmetric Type-II iDCT. */ \
+do { \
+ p1 = p0 - p1; \
+ p1h = DAALA_DCT_RSHIFT(p1, 1); \
+ p0 -= p1h; \
+ } \
+ while (0)
+
+/* 4-point orthonormal iDST stage; all four arguments updated in place,
+ * scratch half values declared locally.  Order-sensitive lifting steps. */
+#define OD_IDST_4(q0, q2, q1, q3) \
+ /* Embedded 4-point orthonormal Type-IV iDST. */ \
+ do { \
+ int q0h; \
+ int q2h; \
+ /* 8553/16384 ~= (1/Sqrt[2] - Cos[7*Pi/16])/Sin[7*Pi/16] ~=
+ 0.52204745462729 */ \
+ q3 -= (q0*8553 + 8192) >> 14; \
+ /* 5681/4096 ~= Sqrt[2]*Sin[7*Pi/16] ~= 1.38703984532215 */ \
+ q0 += (q3*5681 + 2048) >> 12; \
+ /* 5091/8192 ~= (1/Sqrt[2] - Cos[7*Pi/16]/2)/Sin[7*Pi/16] ~=
+ 0.6215036383171189 */ \
+ q3 -= (q0*5091 + 4096) >> 13; \
+ /* 7335/32768 ~= (1/Sqrt[2] - Cos[3*Pi/16])/Sin[3*Pi/16] ~=
+ 0.223847182092655 */ \
+ q1 -= (q2*7335 + 16384) >> 15; \
+ /* 1609/2048 ~= Sqrt[2]*Sin[3*Pi/16] ~= 0.785694958387102 */ \
+ q2 -= (q1*1609 + 1024) >> 11; \
+ /* 537/1024 ~= (1/Sqrt[2] - Cos[3*Pi/16]/2)/Sin[3*Pi/16] ~=
+ 0.524455699240090 */ \
+ q1 += (q2*537 + 512) >> 10; \
+ q2h = DAALA_DCT_RSHIFT(q2, 1); \
+ q3 += q2h; \
+ q2 -= q3; \
+ q0h = DAALA_DCT_RSHIFT(q0, 1); \
+ q1 = q0h - q1; \
+ q0 -= q1; \
+ /* 3393/8192 ~= Tan[Pi/8] ~= 0.414213562373095 */ \
+ q1 -= (q2*3393 + 4096) >> 13; \
+ /* 5793/8192 ~= Sin[Pi/4] ~= 0.707106781186547 */ \
+ q2 += (q1*5793 + 4096) >> 13; \
+ /* 13573/32768 ~= Tan[Pi/8] ~= 0.414213562373095 */ \
+ q1 -= (q2*13573 + 16384) >> 15; \
+ } \
+ while (0)
+
+/* 4-point orthonormal iDCT stage composed from the 2-point asymmetric
+ * kernels; all four arguments updated in place. */
+#define OD_IDCT_4(q0, q2, q1, q3) \
+/* Embedded 4-point orthonormal Type-II iDCT. */ \
+do { \
+ int q1h; \
+ int q3h; \
+ OD_IDST_2_ASYM(q3, q2); \
+ OD_IDCT_2_ASYM(q0, q1, q1h); \
+ q3h = DAALA_DCT_RSHIFT(q3, 1); \
+ q0 += q3h; \
+ q3 = q0 - q3; \
+ q2 = q1h - q2; \
+ q1 -= q2; \
+ } \
+ while (0)
+
+/* 8-point asymmetric iDCT stage built from the 4-point kernels.  The r*h
+ * arguments (r1h/r3h/r5h/r7h) are pure outputs -- half values consumed by
+ * OD_IDCT_16. */
+#define OD_IDCT_8_ASYM(r0, r4, r2, r6, r1, r1h, r5, r5h, r3, r3h, r7, r7h) \
+/* Embedded 8-point asymmetric Type-II iDCT. */ \
+do { \
+ OD_IDST_4(r7, r5, r6, r4); \
+ OD_IDCT_4(r0, r2, r1, r3); \
+ r7 = r0 - r7; \
+ r7h = DAALA_DCT_RSHIFT(r7, 1); \
+ r0 -= r7h; \
+ r1 += r6; \
+ r1h = DAALA_DCT_RSHIFT(r1, 1); \
+ r6 = r1h - r6; \
+ r5 = r2 - r5; \
+ r5h = DAALA_DCT_RSHIFT(r5, 1); \
+ r2 -= r5h; \
+ r3 += r4; \
+ r3h = DAALA_DCT_RSHIFT(r3, 1); \
+ r4 = r3h - r4; \
+ } \
+ while (0)
+
+/* 8-point asymmetric iDST stage; all eight arguments updated in place,
+ * scratch half values (note the double-underscore t5h__/t7h__ spellings,
+ * distinct from other stages' locals) declared locally.  Order-sensitive
+ * lifting steps -- do not reorder. */
+#define OD_IDST_8_ASYM(t0, t4, t2, t6, t1, t5, t3, t7) \
+ /* Embedded 8-point asymmetric Type-IV iDST. */ \
+ do { \
+ int t0h; \
+ int t2h; \
+ int t5h__; \
+ int t7h__; \
+ /* 3393/8192 ~= Tan[Pi/8] ~= 0.414213562373095 */ \
+ t6 -= (t1*3393 + 4096) >> 13; \
+ /* 5793/8192 ~= Sin[Pi/4] ~= 0.707106781186547 */ \
+ t1 += (t6*5793 + 4096) >> 13; \
+ /* 3393/8192 ~= Tan[Pi/8] ~= 0.414213562373095 */ \
+ t6 -= (t1*3393 + 4096) >> 13; \
+ /* 2737/4096 ~= Tan[3*Pi/16] ~= 0.668178637919299 */ \
+ t5 -= (t2*2737 + 2048) >> 12; \
+ /* 473/512 ~= Sin[3*Pi/8] ~= 0.923879532511287 */ \
+ t2 += (t5*473 + 256) >> 9; \
+ /* 2737/4096 ~= Tan[3*Pi/16] ~= 0.668178637919299 */ \
+ t5 -= (t2*2737 + 2048) >> 12; \
+ /* 3259/16384 ~= Tan[Pi/16] ~= 0.198912367379658 */ \
+ t4 -= (t3*3259 + 8192) >> 14; \
+ /* 3135/8192 ~= Sin[Pi/8] ~= 0.382683432365090 */ \
+ t3 += (t4*3135 + 4096) >> 13; \
+ /* 3259/16384 ~= Tan[Pi/16] ~= 0.198912367379658 */ \
+ t4 -= (t3*3259 + 8192) >> 14; \
+ t0 -= t6; \
+ t0h = DAALA_DCT_RSHIFT(t0, 1); \
+ t6 += t0h; \
+ t2 = t3 - t2; \
+ t2h = DAALA_DCT_RSHIFT(t2, 1); \
+ t3 -= t2h; \
+ t5 = t4 - t5; \
+ t5h__ = DAALA_DCT_RSHIFT(t5, 1); \
+ t4 -= t5h__; \
+ t7 += t1; \
+ t7h__ = DAALA_DCT_RSHIFT(t7, 1); \
+ t1 = t7h__ - t1; \
+ t3 = t7h__ - t3; \
+ t7 -= t3; \
+ t1 -= t5h__; \
+ t5 += t1; \
+ t6 -= t2h; \
+ t2 += t6; \
+ t4 += t0h; \
+ t0 -= t4; \
+ /* 20055/32768 ~= (1/Sqrt[2] - Sin[Pi/32])/Cos[Pi/32] ~=
+ 0.612036765167935 */ \
+ t7 -= (t0*20055 + 16384) >> 15; \
+ /* 11529/8192 ~= Sqrt[2]*Cos[Pi/32] ~= 1.40740373752638 */ \
+ t0 += (t7*11529 + 4096) >> 13; \
+ /* 5417/8192 ~= (Sqrt[2] - Sin[Pi/32])/(2*Cos[Pi/32]) ~=
+ 0.661282466846517 */ \
+ t7 -= (t0*5417 + 4096) >> 13; \
+ /* 3525/4096 ~= (Cos[3*Pi/32] - 1/Sqrt[2])/Sin[3*Pi/32] ~=
+ 0.860650162139486 */ \
+ t1 += (t6*3525 + 2048) >> 12; \
+ /* 3363/8192 ~= Sqrt[2]*Sin[3*Pi/32] ~= 0.410524527522357 */ \
+ t6 += (t1*3363 + 4096) >> 13; \
+ /* 12905/16384 ~= (1/Sqrt[2] - Cos[3*Pi/32]/1)/Sin[3*Pi/32] ~=
+ 0.787628942329675 */ \
+ t1 -= (t6*12905 + 8192) >> 14; \
+ /* 4379/16384 ~= (1/Sqrt[2] - Sin[5*Pi/32])/Cos[5*Pi/32] ~=
+ 0.267268807193026 */ \
+ t5 -= (t2*4379 + 8192) >> 14; \
+ /* 10217/8192 ~= Sqrt[2]*Cos[5*Pi/32] ~= 1.24722501298667 */ \
+ t2 += (t5*10217 + 4096) >> 13; \
+ /* 4379/8192 ~= (Sqrt[2] - Sin[5*Pi/32])/(2*Cos[5*Pi/32]) ~=
+ 0.534524375168421 */ \
+ t5 -= (t2*4379 + 4096) >> 13; \
+ /* 851/8192 ~= (Cos[7*Pi/32] - 1/Sqrt[2])/Sin[7*Pi/32] ~=
+ 0.103884567856159 */ \
+ t3 += (t4*851 + 4096) >> 13; \
+ /* 14699/16384 ~= Sqrt[2]*Sin[7*Pi/32] ~= 0.897167586342636 */ \
+ t4 += (t3*14699 + 8192) >> 14; \
+ /* 1035/2048 ~= (Sqrt[2] - Cos[7*Pi/32])/(2*Sin[7*Pi/32]) ~=
+ 0.505367194937830 */ \
+ t3 -= (t4*1035 + 1024) >> 11; \
+ } \
+ while (0)
+
+/* 16-point orthonormal iDCT stage composed from the 8-point asymmetric
+ * kernels; all sixteen arguments updated in place, half values kept in
+ * local scratch. */
+#define OD_IDCT_16(s0, s8, s4, sc, s2, sa, s6, se, \
+s1, s9, s5, sd, s3, sb, s7, sf) \
+/* Embedded 16-point orthonormal Type-II iDCT. */ \
+do { \
+ int s1h; \
+ int s3h; \
+ int s5h; \
+ int s7h; \
+ int sfh; \
+ OD_IDST_8_ASYM(sf, sb, sd, s9, se, sa, sc, s8); \
+ OD_IDCT_8_ASYM(s0, s4, s2, s6, s1, s1h, s5, s5h, s3, s3h, s7, s7h); \
+ sfh = DAALA_DCT_RSHIFT(sf, 1); \
+ s0 += sfh; \
+ sf = s0 - sf; \
+ se = s1h - se; \
+ s1 -= se; \
+ s2 += DAALA_DCT_RSHIFT(sd, 1); \
+ sd = s2 - sd; \
+ sc = s3h - sc; \
+ s3 -= sc; \
+ s4 += DAALA_DCT_RSHIFT(sb, 1); \
+ sb = s4 - sb; \
+ sa = s5h - sa; \
+ s5 -= sa; \
+ s6 += DAALA_DCT_RSHIFT(s9, 1); \
+ s9 = s6 - s9; \
+ s8 = s7h - s8; \
+ s7 -= s8; \
+ } \
+ while (0)
+
+#define OD_IDST_16(s0, s8, s4, sc, s2, sa, s6, se, \
+ s1, s9, s5, sd, s3, sb, s7, sf) \
+ /* Embedded 16-point orthonormal Type-IV iDST. */ \
+ do { \
+ int s0h; \
+ int s4h; \
+ int sbh; \
+ int sfh; \
+ /* 1651/32768 ~= (1/Sqrt[2] - Cos[15*Pi/64])/Sin[15*Pi/64] ~=
+ 0.05039668360333519 */ \
+ se += (s1*1651 + 16384) >> 15; \
+ /* 1945/2048 ~= Sqrt[2]*Sin[15*Pi/64] ~= 0.9497277818777543 */ \
+ s1 += (se*1945 + 1024) >> 11; \
+ /* 2053/4096 ~= (1/Sqrt[2] - Cos[15*Pi/64]/2)/Sin[15*Pi/64] ~=
+ 0.5012683042634027 */ \
+ se -= (s1*2053 + 2048) >> 12; \
+ /* 4545/32768 ~= (1/Sqrt[2] - Cos[19*Pi/64])/Sin[19*Pi/64] ~=
+ 0.13870322715817154 */ \
+ s6 += (s9*4545 + 16384) >> 15; \
+ /* 4653/32768 ~= Sqrt[2]*Sin[19*Pi/64] ~= 1.1359069844201428 */ \
+ s9 -= (s6*4653 + 2048) >> 12; \
+ /* 2087/4096 ~= (1/Sqrt[2] - Cos[19*Pi/64]/2)/Sin[19*Pi/64] ~=
+ 0.5095285002941893 */ \
+ s6 += (s9*2087 + 2048) >> 12; \
+ /* 75/256 ~= (1/Sqrt[2] - Cos[11*Pi/64])/Sin[11*Pi/64] ~=
+ 0.2929800132658202 */ \
+ s5 -= (sa*75 + 128) >> 8; \
+ /* 1489/2048 ~= Sqrt[2]*Sin[11*Pi/64] ~= 0.72705107329128 */ \
+ sa -= (s5*1489 + 1024) >> 11; \
+ /* 2217/4096 ~= (1/Sqrt[2] - Cos[11*Pi/64]/2)/Sin[11*Pi/64] ~=
+ 0.5412195895259334 */ \
+ s5 += (sa*2217 + 2048) >> 12; \
+ /* 5067/16384 ~= (1/Sqrt[2] - Cos[23*Pi/64])/Sin[23*Pi/64] ~=
+ 0.30924225528198984 */ \
+ sd -= (s2*5067 + 8192) >> 14; \
+ /* 1309/1024 ~= Sqrt[2]*Sin[23*Pi/64] ~= 1.278433918575241 */ \
+ s2 += (sd*1309 + 512) >> 10; \
+ /* 4471/8192 ~= (1/Sqrt[2] - Cos[23*Pi/64]/2)/Sin[23*Pi/64] ~=
+ 0.5457246432276498 */ \
+ sd -= (s2*4471 + 4096) >> 13; \
+ /* 5701/8192 ~= (1/Sqrt[2] - Cos[7*Pi/64])/Sin[7*Pi/64] ~=
+ 0.6958870433047222 */ \
+ s3 -= (sc*5701 + 4096) >> 13; \
+ /* 3903/8192 ~= Sqrt[2]*Sin[7*Pi/64] ~= 0.47643419969316125 */ \
+ sc -= (s3*3903 + 4096) >> 13; \
+ /* 5747/8192 ~= (1/Sqrt[2] - Cos[7*Pi/64]/2)/Sin[7*Pi/64] ~=
+ 0.7015193429405162 */ \
+ s3 += (sc*5747 + 4096) >> 13; \
+ /* 7839/16384 ~= (1/Sqrt[2] - Cos[27*Pi/64])/Sin[27*Pi/64] ~=
+ 0.47846561618999817 */ \
+ sb -= (s4*7839 + 8192) >> 14; \
+ /* 5619/4096 ~= Sqrt[2]*Sin[27*Pi/64] ~= 1.371831354193494 */ \
+ s4 += (sb*5619 + 2048) >> 12; \
+ /* 2473/4096 ~= (1/Sqrt[2] - Cos[27*Pi/64]/2)/Sin[27*Pi/64] ~=
+ 0.603709096285651 */ \
+ sb -= (s4*2473 + 2048) >> 12; \
+ /* 4641/8192 ~= (1/Sqrt[2] - Cos[29*Pi/64])/Sin[29*Pi/64] ~=
+ 0.5665078993345056 */ \
+ s7 -= (s8*4641 + 4096) >> 13; \
+ /* 2865/2048 ~= Sqrt[2]*Sin[29*Pi/64] ~= 1.3989068359730783 */ \
+ s8 += (s7*2865 + 1024) >> 11; \
+ /* 41/64 ~= (1/Sqrt[2] - Cos[29*Pi/64]/2)/Sin[29*Pi/64] ~=
+ 0.6406758931036793 */ \
+ s7 -= (s8*41 + 32) >> 6; \
+ /* 5397/8192 ~= (Cos[Pi/4] - Cos[31*Pi/64])/Sin[31*Pi/64] ~=
+ 0.6588326996993819 */ \
+ s0 += (sf*5397 + 4096) >> 13; \
+ /* 2893/2048 ~= Sqrt[2]*Sin[31*Pi/64] ~= 1.4125100802019777 */ \
+ sf -= (s0*2893 + 1024) >> 11; \
+ /* 2799/4096 ~= (1/Sqrt[2] - Cos[31*Pi/64]/2)/Sin[31*Pi/64] ~=
+ 0.6833961245841154 */ \
+ s0 += (sf*2799 + 2048) >> 12; \
+ sd -= DAALA_DCT_RSHIFT(sc, 1); \
+ sc += sd; \
+ s3 += DAALA_DCT_RSHIFT(s2, 1); \
+ s2 = s3 - s2; \
+ sb += DAALA_DCT_RSHIFT(sa, 1); \
+ sa -= sb; \
+ s5 = DAALA_DCT_RSHIFT(s4, 1) - s5; \
+ s4 -= s5; \
+ s7 = DAALA_DCT_RSHIFT(s9, 1) - s7; \
+ s9 -= s7; \
+ s6 -= DAALA_DCT_RSHIFT(s8, 1); \
+ s8 += s6; \
+ se = DAALA_DCT_RSHIFT(sf, 1) - se; \
+ sf -= se; \
+ s0 += DAALA_DCT_RSHIFT(s1, 1); \
+ s1 -= s0; \
+ s5 -= s9; \
+ s9 += DAALA_DCT_RSHIFT(s5, 1); \
+ sa = s6 - sa; \
+ s6 -= DAALA_DCT_RSHIFT(sa, 1); \
+ se += s2; \
+ s2 -= DAALA_DCT_RSHIFT(se, 1); \
+ s1 = sd - s1; \
+ sd -= DAALA_DCT_RSHIFT(s1, 1); \
+ s0 += s3; \
+ s0h = DAALA_DCT_RSHIFT(s0, 1); \
+ s3 = s0h - s3; \
+ sf += sc; \
+ sfh = DAALA_DCT_RSHIFT(sf, 1); \
+ sc -= sfh; \
+ sb = s7 - sb; \
+ sbh = DAALA_DCT_RSHIFT(sb, 1); \
+ s7 -= sbh; \
+ s4 -= s8; \
+ s4h = DAALA_DCT_RSHIFT(s4, 1); \
+ s8 += s4h; \
+ /* 3227/32768 ~= Tan[Pi/32] ~= 0.09849140335716425 */ \
+ se -= (s1*3227 + 16384) >> 15; \
+ /* 6393/32768 ~= Sin[Pi/16] ~= 0.19509032201612825 */ \
+ s1 += (se*6393 + 16384) >> 15; \
+ /* 3227/32768 ~= Tan[Pi/32] ~= 0.09849140335716425 */ \
+ se -= (s1*3227 + 16384) >> 15; \
+ /* 2485/8192 ~= Tan[3*Pi/32] ~= 0.303346683607342 */ \
+ s6 -= (s9*2485 + 4096) >> 13; \
+ /* 18205/32768 ~= Sin[3*Pi/16] ~= 0.555570233019602 */ \
+ s9 += (s6*18205 + 16384) >> 15; \
+ /* 2485/8192 ~= Tan[3*Pi/32] ~= 0.303346683607342 */ \
+ s6 -= (s9*2485 + 4096) >> 13; \
+ /* 8757/16384 ~= Tan[5*Pi/32] ~= 0.534511135950792 */ \
+ s5 -= (sa*8757 + 8192) >> 14; \
+ /* 6811/8192 ~= Sin[5*Pi/16] ~= 0.831469612302545 */ \
+ sa += (s5*6811 + 4096) >> 13; \
+ /* 8757/16384 ~= Tan[5*Pi/32]) ~= 0.534511135950792 */ \
+ s5 -= (sa*8757 + 8192) >> 14; \
+ /* 6723/8192 ~= Tan[7*Pi/32]) ~= 0.820678790828660 */ \
+ s2 -= (sd*6723 + 4096) >> 13; \
+ /* 16069/16384 ~= Sin[7*Pi/16] ~= 0.980785280403230 */ \
+ sd += (s2*16069 + 8192) >> 14; \
+ /* 6723/8192 ~= Tan[7*Pi/32] ~= 0.820678790828660 */ \
+ s2 -= (sd*6723 + 4096) >> 13; \
+ s9 += DAALA_DCT_RSHIFT(se, 1); \
+ se = s9 - se; \
+ s6 += DAALA_DCT_RSHIFT(s1, 1); \
+ s1 -= s6; \
+ sd = DAALA_DCT_RSHIFT(sa, 1) - sd; \
+ sa -= sd; \
+ s2 += DAALA_DCT_RSHIFT(s5, 1); \
+ s5 = s2 - s5; \
+ s3 -= sbh; \
+ sb += s3; \
+ sc += s4h; \
+ s4 = sc - s4; \
+ s8 = s0h - s8; \
+ s0 -= s8; \
+ s7 = sfh - s7; \
+ sf -= s7; \
+ /* 13573/32768 ~= Tan[pi/8] ~= 0.414213562373095 */ \
+ s6 -= (s9*13573 + 16384) >> 15; \
+ /* 11585/16384 ~= Sin[pi/4] ~= 0.707106781186547 */ \
+ s9 += (s6*11585 + 8192) >> 14; \
+ /* 13573/32768 ~= Tan[pi/8] ~= 0.414213562373095 */ \
+ s6 -= (s9*13573 + 16384) >> 15; \
+ /* 13573/32768 ~= Tan[pi/8] ~= 0.414213562373095 */ \
+ s5 -= (sa*13573 + 16384) >> 15; \
+ /* 11585/16384 ~= Sin[pi/4] ~= 0.707106781186547 */ \
+ sa += (s5*11585 + 8192) >> 14; \
+ /* 13573/32768 ~= Tan[pi/8] ~= 0.414213562373095 */ \
+ s5 -= (sa*13573 + 16384) >> 15; \
+ /* 3259/16384 ~= Tan[Pi/16] ~= 0.198912367379658 */ \
+ s3 -= (sc*3259 + 8192) >> 14; \
+ /* 3135/8192 ~= Sin[Pi/8] ~= 0.382683432365090 */ \
+ sc += (s3*3135 + 4096) >> 13; \
+ /* 3259/16384 ~= Tan[Pi/16] ~= 0.198912367379658 */ \
+ s3 -= (sc*3259 + 8192) >> 14; \
+ /* 21895/32768 ~= Tan[3*Pi/16] ~= 0.668178637919299 */ \
+ sb -= (s4*21895 + 16384) >> 15; \
+ /* 15137/16384 ~= Sin[3*Pi/8] ~= 0.923879532511287 */ \
+ s4 += (sb*15137 + 8192) >> 14; \
+ /* 21895/32768 ~= Tan[3*Pi/16] ~= 0.668178637919299 */ \
+ sb -= (s4*21895 + 16384) >> 15; \
+ /* 13573/32768 ~= Tan[pi/8] ~= 0.414213562373095 */ \
+ s8 -= (s7*13573 + 16384) >> 15; \
+ /* 11585/16384 ~= Sin[pi/4] ~= 0.707106781186547 */ \
+ s7 += (s8*11585 + 8192) >> 14; \
+ /* 13573/32768 ~= Tan[pi/8] ~= 0.414213562373095 */ \
+ s8 -= (s7*13573 + 16384) >> 15; \
+ } \
+ while (0)
+
+#define OD_IDCT_32_ASYM(t0, tg, t8, to, t4, tk, tc, ts, t2, ti, ta, tq, \
+t6, tm, te, tu, t1, t1h, th, thh, t9, t9h, tp, tph, t5, t5h, tl, tlh, \
+td, tdh, tt, tth, t3, t3h, tj, tjh, tb, tbh, tr, trh, t7, t7h, tn, tnh, \
+tf, tfh, tv, tvh) \
+/* Embedded 32-point asymmetric Type-II iDCT. */ \
+/* First inverts the two embedded 16-point halves (iDST on the odd-indexed \
+   coefficients, iDCT on the even-indexed ones), then recombines each \
+   (even, odd) pair with reversible add/half-subtract butterflies. The \
+   *h outputs carry precomputed halved values reused by the caller. */ \
+do { \
+ OD_IDST_16(tv, tn, tr, tj, tt, tl, tp, th, \
+ tu, tm, tq, ti, ts, tk, to, tg); \
+ OD_IDCT_16(t0, t8, t4, tc, t2, ta, t6, te, \
+ t1, t9, t5, td, t3, tb, t7, tf); \
+ tv = t0 - tv; \
+ tvh = DAALA_DCT_RSHIFT(tv, 1); \
+ t0 -= tvh; \
+ t1 += tu; \
+ t1h = DAALA_DCT_RSHIFT(t1, 1); \
+ tu = t1h - tu; \
+ tt = t2 - tt; \
+ tth = DAALA_DCT_RSHIFT(tt, 1); \
+ t2 -= tth; \
+ t3 += ts; \
+ t3h = DAALA_DCT_RSHIFT(t3, 1); \
+ ts = t3h - ts; \
+ tr = t4 - tr; \
+ trh = DAALA_DCT_RSHIFT(tr, 1); \
+ t4 -= trh; \
+ t5 += tq; \
+ t5h = DAALA_DCT_RSHIFT(t5, 1); \
+ tq = t5h - tq; \
+ tp = t6 - tp; \
+ tph = DAALA_DCT_RSHIFT(tp, 1); \
+ t6 -= tph; \
+ t7 += to; \
+ t7h = DAALA_DCT_RSHIFT(t7, 1); \
+ to = t7h - to; \
+ tn = t8 - tn; \
+ tnh = DAALA_DCT_RSHIFT(tn, 1); \
+ t8 -= tnh; \
+ t9 += tm; \
+ t9h = DAALA_DCT_RSHIFT(t9, 1); \
+ tm = t9h - tm; \
+ tl = ta - tl; \
+ tlh = DAALA_DCT_RSHIFT(tl, 1); \
+ ta -= tlh; \
+ tb += tk; \
+ tbh = DAALA_DCT_RSHIFT(tb, 1); \
+ tk = tbh - tk; \
+ tj = tc - tj; \
+ tjh = DAALA_DCT_RSHIFT(tj, 1); \
+ tc -= tjh; \
+ td += ti; \
+ tdh = DAALA_DCT_RSHIFT(td, 1); \
+ ti = tdh - ti; \
+ th = te - th; \
+ thh = DAALA_DCT_RSHIFT(th, 1); \
+ te -= thh; \
+ tf += tg; \
+ tfh = DAALA_DCT_RSHIFT(tf, 1); \
+ tg = tfh - tg; \
+ } \
+ while (0)
+
+#define OD_IDST_32_ASYM(t0, tg, t8, to, t4, tk, tc, ts, t2, ti, ta, tq, t6, \
+ tm, te, tu, t1, th, t9, tp, t5, tl, td, tt, t3, tj, tb, tr, t7, tn, tf, tv) \
+ /* Embedded 32-point asymmetric Type-IV iDST. */ \
+ /* Undoes the forward DST lifting stages in reverse order: each rotation \
+    is a three-multiply lifting triple (the ratios in the comments are the \
+    fixed-point approximations of the exact trig constants), separated by \
+    reversible butterfly/half-shift stages and sign flips. All steps are \
+    exactly invertible in integer arithmetic. */ \
+ do { \
+ int t0h; \
+ int t4h; \
+ int tbh; \
+ int tfh; \
+ int tgh; \
+ int tkh; \
+ int trh; \
+ int tvh; \
+ /* 1301/2048 ~= (1/Sqrt[2] - Cos[61*Pi/128])/Sin[61*Pi/128] ~=
+ 0.6352634915376478 */ \
+ tf -= (tg*1301 + 1024) >> 11; \
+ /* 5777/4096 ~= Sqrt[2]*Sin[61*Pi/128] ~= 1.4103816894602614 */ \
+ tg += (tf*5777 + 2048) >> 12; \
+ /* 2753/4096 ~= (1/Sqrt[2] - Cos[61*Pi/128]/2)/Sin[61*Pi/128] ~=
+ 0.6721457072988726 */ \
+ tf -= (tg*2753 + 2048) >> 12; \
+ /* -2513/32768 ~= (1/Sqrt[2] - Cos[29*Pi/128])/Sin[29*Pi/128] ~=
+ -0.07670567731102484 */ \
+ th -= (te*2513 + 16384) >> 15; \
+ /* 7567/8192 ~= Sqrt[2]*Sin[29*Pi/128] ~= 0.9237258930790229 */ \
+ te -= (th*7567 + 4096) >> 13; \
+ /* 515/1024 ~= (1/Sqrt[2] - Cos[29*Pi/128]/2)/Sin[29*Pi/128] ~=
+ 0.5029332763556925 */ \
+ th += (te*515 + 512) >> 10; \
+ /* -6331/8192 ~= (1/Sqrt[2] - Cos[13*Pi/128])/Sin[13*Pi/128] ~=
+ -0.772825983107003 */ \
+ tj -= (tc*6331 + 4096) >> 13; \
+ /* 1817/4096 ~= Sqrt[2]*Sin[13*Pi/128] ~= 0.4436129715409088 */ \
+ tc -= (tj*1817 + 2048) >> 12; \
+ /* 1517/2048 ~= (1/Sqrt[2] - Cos[13*Pi/128]/2)/Sin[13*Pi/128] ~=
+ 0.7406956190518837 */ \
+ tj += (tc*1517 + 1024) >> 11; \
+ /* -3391/8192 ~= (1/Sqrt[2] - Cos[19*Pi/128])/Sin[19*Pi/128] ~=
+ -0.41395202418930155 */ \
+ ti += (td*3391 + 4096) >> 13; \
+ /* 5209/8192 ~= Sqrt[2]*Sin[19*Pi/128] ~= 0.6358464401941452 */ \
+ td += (ti*5209 + 4096) >> 13; \
+ /* 2373/4096 ~= (1/Sqrt[2] - Cos[19*Pi/128]/2)/Sin[19*Pi/128] ~=
+ 0.5793773719823809 */ \
+ ti -= (td*2373 + 2048) >> 12; \
+ /* 8195/16384 ~= (1/Sqrt[2] - Cos[55*Pi/128])/Sin[55*Pi/128] ~=
+ 0.5001583229201391 */ \
+ tr -= (t4*8195 + 8192) >> 14; \
+ /* 1413/1024 ~= Sqrt[2]*Sin[55*Pi/128] ~= 1.3798511851368045 */ \
+ t4 += (tr*1413 + 512) >> 10; \
+ /* 5017/8192 ~= (1/Sqrt[2] - Cos[55*Pi/128]/2)/Sin[55*Pi/128] ~=
+ 0.6124370775787037 */ \
+ tr -= (t4*5017 + 4096) >> 13; \
+ /* -8437/32768 ~= (1/Sqrt[2] - Cos[23*Pi/128])/Sin[23*Pi/128] ~=
+ -0.2574717698598901 */ \
+ t5 -= (tq*8437 + 16384) >> 15; \
+ /* 3099/4096 ~= Sqrt[2]*Sin[23*Pi/128] ~= 0.7566008898816587 */ \
+ tq -= (t5*3099 + 2048) >> 12; \
+ /* 4359/8192 ~= (1/Sqrt[2] - Cos[23*Pi/128]/2)/Sin[23*Pi/128] ~=
+ 0.5321145141202145 */ \
+ t5 += (tq*4359 + 4096) >> 13; \
+ /* -6287/32768 ~= (1/Sqrt[2] - Cos[25*Pi/128])/Sin[25*Pi/128] ~=
+ -0.19186603041023065 */ \
+ tp -= (t6*6287 + 16384) >> 15; \
+ /* 6671/8192 ~= Sqrt[2]*Sin[25*Pi/128] ~= 0.8143157536286402 */ \
+ t6 -= (tp*6671 + 4096) >> 13; \
+ /* 1061/2048 ~= (1/Sqrt[2] - Cos[25*Pi/128]/2)/Sin[25*Pi/128] ~=
+ 0.5180794213368158 */ \
+ tp += (t6*1061 + 1024) >> 11; \
+ /* 2229/4096 ~= (1/Sqrt[2] - Cos[57*Pi/128])/Sin[57*Pi/128] ~=
+ 0.5441561539205226 */ \
+ t7 -= (to*2229 + 2048) >> 12; \
+ /* 5707/4096 ~= Sqrt[2]*Sin[57*Pi/128] ~= 1.3933930045694292 */ \
+ to += (t7*5707 + 2048) >> 12; \
+ /* 323/512 ~= (1/Sqrt[2] - Cos[57*Pi/128]/2)/Sin[57*Pi/128] ~=
+ 0.6309143839894504 */ \
+ t7 -= (to*323 + 256) >> 9; \
+ /* -1971/2048 ~= (1/Sqrt[2] - Cos[11*Pi/128])/Sin[11*Pi/128] ~=
+ -0.9623434853244648 */ \
+ tk += (tb*1971 + 1024) >> 11; \
+ /* 1545/4096 ~= Sqrt[2]*Sin[11*Pi/128] ~= 0.37718879887892737 */ \
+ tb += (tk*1545 + 2048) >> 12; \
+ /* 3459/4096 ~= (1/Sqrt[2] - Cos[11*Pi/128]/2)/Sin[11*Pi/128] ~=
+ 0.8444243553292501 */ \
+ tk -= (tb*3459 + 2048) >> 12; \
+ /* -5417/16384 ~= (1/Sqrt[2] - Cos[21*Pi/128])/Sin[21*Pi/128] ~=
+ -0.3306569439519963 */ \
+ tl -= (ta*5417 + 8192) >> 14; \
+ /* 2855/4096 ~= Sqrt[2]*Sin[21*Pi/128] ~= 0.6970633083205415 */ \
+ ta -= (tl*2855 + 2048) >> 12; \
+ /* 2261/4096 ~= (1/Sqrt[2] - Cos[21*Pi/128]/2)/Sin[21*Pi/128] ~=
+ 0.5519664910950994 */ \
+ tl += (ta*2261 + 2048) >> 12; \
+ /* -4327/32768 ~= (1/Sqrt[2] - Cos[27*Pi/128])/Sin[27*Pi/128] ~=
+ -0.13204726103773165 */ \
+ t9 -= (tm*4327 + 16384) >> 15; \
+ /* 891/1024 ~= Sqrt[2]*Sin[27*Pi/128] ~= 0.8700688593994939 */ \
+ tm -= (t9*891 + 512) >> 10; \
+ /* 4167/8192 ~= (1/Sqrt[2] - Cos[27*Pi/128]/2)/Sin[27*Pi/128] ~=
+ 0.5086435289805458 */ \
+ t9 += (tm*4167 + 4096) >> 13; \
+ /* 2413/4096 ~= (1/Sqrt[2] - Cos[59*Pi/128])/Sin[59*Pi/128] ~=
+ 0.5891266122920528 */ \
+ tn -= (t8*2413 + 2048) >> 12; \
+ /* 5749/4096 ~= Sqrt[2]*Sin[59*Pi/128] ~= 1.4035780182072333 */ \
+ t8 += (tn*5749 + 2048) >> 12; \
+ /* 5331/8192 ~= (1/Sqrt[2] - Cos[59*Pi/128]/2)/Sin[59*Pi/128] ~=
+ 0.6507957303604222 */ \
+ tn -= (t8*5331 + 4096) >> 13; \
+ /* -2571/4096 ~= (1/Sqrt[2] - Cos[15*Pi/128])/Sin[15*Pi/128] ~=
+ -0.6276441593165217 */ \
+ ts += (t3*2571 + 2048) >> 12; \
+ /* 8339/32768 ~= Sqrt[2]*Sin[15*Pi/128] ~= 0.5089684416985407 */ \
+ t3 += (ts*8339 + 8192) >> 14; \
+ /* 5477/8192 ~= (1/Sqrt[2] - Cos[15*Pi/128]/2)/Sin[15*Pi/128] ~=
+ 0.6685570995525147 */ \
+ ts -= (t3*5477 + 4096) >> 13; \
+ /* -8373/16384 ~= (1/Sqrt[2] - Cos[17*Pi/128])/Sin[17*Pi/128] ~=
+ -0.5110608601827629 */ \
+ tt -= (t2*8373 + 8192) >> 14; \
+ /* 4695/8192 ~= Sqrt[2]*Sin[17*Pi/128] ~= 0.5730977622997507 */ \
+ t2 -= (tt*4695 + 4096) >> 13; \
+ /* 2527/4096 ~= (1/Sqrt[2] - Cos[17*Pi/128]/2)/Sin[17*Pi/128] ~=
+ 0.6169210657818165 */ \
+ tt += (t2*2527 + 2048) >> 12; \
+ /* -815/32768 ~= (1/Sqrt[2] - Cos[31*Pi/128])/Sin[31*Pi/128] ~=
+ -0.02485756913896231 */ \
+ t1 -= (tu*815 + 16384) >> 15; \
+ /* 1997/2048 ~= Sqrt[2]*Sin[31*Pi/128] ~= 0.9751575901732918 */ \
+ tu -= (t1*1997 + 1024) >> 11; \
+ /* 8197/16384 ~= (1/Sqrt[2] - Cos[31*Pi/128]/2)/Sin[31*Pi/128] ~=
+ 0.5003088539809675 */ \
+ t1 += (tu*8197 + 8192) >> 14; \
+ /* 5593/8192 ~= (1/Sqrt[2] - Cos[63*Pi/128])/Sin[63*Pi/128] ~=
+ 0.6827711905810085 */ \
+ tv -= (t0*5593 + 4096) >> 13; \
+ /* 5791/4096 ~= Sqrt[2]*Sin[63*Pi/128] ~= 1.413787627688534 */ \
+ t0 += (tv*5791 + 2048) >> 12; \
+ /* 2847/4096 ~= (1/Sqrt[2] - Cos[63*Pi/128]/2)/Sin[63*Pi/128] ~=
+ 0.6950455016354713 */ \
+ tv -= (t0*2847 + 2048) >> 12; \
+ \
+ t7 = -t7; \
+ tf = -tf; \
+ tn = -tn; \
+ tr = -tr; \
+ \
+ t7 -= DAALA_DCT_RSHIFT(t6, 1); \
+ t6 += t7; \
+ tp -= DAALA_DCT_RSHIFT(to, 1); \
+ to += tp; \
+ tr -= DAALA_DCT_RSHIFT(tq, 1); \
+ tq += tr; \
+ t5 -= DAALA_DCT_RSHIFT(t4, 1); \
+ t4 += t5; \
+ tt -= DAALA_DCT_RSHIFT(t3, 1); \
+ t3 += tt; \
+ ts -= DAALA_DCT_RSHIFT(t2, 1); \
+ t2 += ts; \
+ tv += DAALA_DCT_RSHIFT(tu, 1); \
+ tu -= tv; \
+ t1 -= DAALA_DCT_RSHIFT(t0, 1); \
+ t0 += t1; \
+ th -= DAALA_DCT_RSHIFT(tg, 1); \
+ tg += th; \
+ tf -= DAALA_DCT_RSHIFT(te, 1); \
+ te += tf; \
+ ti += DAALA_DCT_RSHIFT(tc, 1); \
+ tc -= ti; \
+ tj += DAALA_DCT_RSHIFT(td, 1); \
+ td -= tj; \
+ tn -= DAALA_DCT_RSHIFT(tm, 1); \
+ tm += tn; \
+ t9 -= DAALA_DCT_RSHIFT(t8, 1); \
+ t8 += t9; \
+ tl -= DAALA_DCT_RSHIFT(tb, 1); \
+ tb += tl; \
+ tk -= DAALA_DCT_RSHIFT(ta, 1); \
+ ta += tk; \
+ \
+ ti -= th; \
+ th += DAALA_DCT_RSHIFT(ti, 1); \
+ td -= te; \
+ te += DAALA_DCT_RSHIFT(td, 1); \
+ tm += tl; \
+ tl -= DAALA_DCT_RSHIFT(tm, 1); \
+ t9 += ta; \
+ ta -= DAALA_DCT_RSHIFT(t9, 1); \
+ tp += tq; \
+ tq -= DAALA_DCT_RSHIFT(tp, 1); \
+ t6 += t5; \
+ t5 -= DAALA_DCT_RSHIFT(t6, 1); \
+ t2 -= t1; \
+ t1 += DAALA_DCT_RSHIFT(t2, 1); \
+ tt -= tu; \
+ tu += DAALA_DCT_RSHIFT(tt, 1); \
+ tr += t7; \
+ trh = DAALA_DCT_RSHIFT(tr, 1); \
+ t7 -= trh; \
+ t4 -= to; \
+ t4h = DAALA_DCT_RSHIFT(t4, 1); \
+ to += t4h; \
+ t0 += t3; \
+ t0h = DAALA_DCT_RSHIFT(t0, 1); \
+ t3 -= t0h; \
+ tv += ts; \
+ tvh = DAALA_DCT_RSHIFT(tv, 1); \
+ ts -= tvh; \
+ tf -= tc; \
+ tfh = DAALA_DCT_RSHIFT(tf, 1); \
+ tc += tfh; \
+ tg += tj; \
+ tgh = DAALA_DCT_RSHIFT(tg, 1); \
+ tj -= tgh; \
+ tb -= t8; \
+ tbh = DAALA_DCT_RSHIFT(tb, 1); \
+ t8 += tbh; \
+ tk += tn; \
+ tkh = DAALA_DCT_RSHIFT(tk, 1); \
+ tn -= tkh; \
+ \
+ ta = -ta; \
+ tq = -tq; \
+ \
+ /* 4861/32768 ~= Tan[3*Pi/64] ~= 0.14833598753834742 */ \
+ te -= (th*4861 + 16384) >> 15; \
+ /* 1189/4096 ~= Sin[3*Pi/32] ~= 0.29028467725446233 */ \
+ th += (te*1189 + 2048) >> 12; \
+ /* 4861/32768 ~= Tan[3*Pi/64] ~= 0.14833598753834742 */ \
+ te -= (th*4861 + 16384) >> 15; \
+ /* 513/2048 ~= Tan[5*Pi/64] ~= 0.25048696019130545 */ \
+ tm -= (t9*513 + 1024) >> 11; \
+ /* 7723/16384 ~= Sin[5*Pi/32] ~= 0.47139673682599764 */ \
+ t9 += (tm*7723 + 8192) >> 14; \
+ /* 513/2048 ~= Tan[5*Pi/64] ~= 0.25048696019130545 */ \
+ tm -= (t9*513 + 1024) >> 11; \
+ /* 11725/32768 ~= Tan[7*Pi/64] ~= 0.3578057213145241 */ \
+ t6 -= (tp*11725 + 16384) >> 15; \
+ /* 5197/8192 ~= Sin[7*Pi/32] ~= 0.6343932841636455 */ \
+ tp += (t6*5197 + 4096) >> 13; \
+ /* 11725/32768 ~= Tan[7*Pi/64] ~= 0.3578057213145241 */ \
+ t6 -= (tp*11725 + 16384) >> 15; \
+ /* 805/16384 ~= Tan[Pi/64] ~= 0.04912684976946793 */ \
+ tu -= (t1*805 + 8192) >> 14; \
+ /* 803/8192 ~= Sin[Pi/32] ~= 0.0980171403295606 */ \
+ t1 += (tu*803 + 4096) >> 13; \
+ /* 805/16384 ~= Tan[Pi/64] ~= 0.04912684976946793 */ \
+ tu -= (t1*805 + 8192) >> 14; \
+ /* 4861/32768 ~= Tan[3*Pi/64] ~= 0.14833598753834742 */ \
+ ti -= (td*4861 + 16384) >> 15; \
+ /* 1189/4096 ~= Sin[3*Pi/32] ~= 0.29028467725446233 */ \
+ td += (ti*1189 + 2048) >> 12; \
+ /* 4861/32768 ~= Tan[3*Pi/64] ~= 0.14833598753834742 */ \
+ ti -= (td*4861 + 16384) >> 15; \
+ /* 2455/4096 ~= Tan[11*Pi/64] ~= 0.5993769336819237 */ \
+ ta -= (tl*2455 + 2048) >> 12; \
+ /* 14449/16384 ~= Sin[11*Pi/32] ~= 0.881921264348355 */ \
+ tl += (ta*14449 + 8192) >> 14; \
+ /* 2455/4096 ~= Tan[11*Pi/64] ~= 0.5993769336819237 */ \
+ ta -= (tl*2455 + 2048) >> 12; \
+ /* 11725/32768 ~= Tan[7*Pi/64] ~= 0.3578057213145241 */ \
+ t5 -= (tq*11725 + 16384) >> 15; \
+ /* 5197/8192 ~= Sin[7*Pi/32] ~= 0.6343932841636455 */ \
+ tq += (t5*5197 + 4096) >> 13; \
+ /* 11725/32768 ~= Tan[7*Pi/64] ~= 0.3578057213145241 */ \
+ t5 -= (tq*11725 + 16384) >> 15; \
+ /* 805/16384 ~= Tan[Pi/64] ~= 0.04912684976946793 */ \
+ t2 -= (tt*805 + 8192) >> 14; \
+ /* 803/8192 ~= Sin[Pi/32] ~= 0.0980171403295606 */ \
+ tt += (t2*803 + 4096) >> 13; \
+ /* 805/16384 ~= Tan[Pi/64] ~= 0.04912684976946793 */ \
+ t2 -= (tt*805 + 8192) >> 14; \
+ \
+ tl = -tl; \
+ ti = -ti; \
+ \
+ th += DAALA_DCT_RSHIFT(t9, 1); \
+ t9 -= th; \
+ te -= DAALA_DCT_RSHIFT(tm, 1); \
+ tm += te; \
+ t1 += DAALA_DCT_RSHIFT(tp, 1); \
+ tp -= t1; \
+ tu -= DAALA_DCT_RSHIFT(t6, 1); \
+ t6 += tu; \
+ ta -= DAALA_DCT_RSHIFT(td, 1); \
+ td += ta; \
+ tl += DAALA_DCT_RSHIFT(ti, 1); \
+ ti -= tl; \
+ t5 += DAALA_DCT_RSHIFT(tt, 1); \
+ tt -= t5; \
+ tq += DAALA_DCT_RSHIFT(t2, 1); \
+ t2 -= tq; \
+ \
+ t8 -= tgh; \
+ tg += t8; \
+ tn += tfh; \
+ tf -= tn; \
+ t7 -= tvh; \
+ tv += t7; \
+ to -= t0h; \
+ t0 += to; \
+ tc += tbh; \
+ tb -= tc; \
+ tj += tkh; \
+ tk -= tj; \
+ ts += t4h; \
+ t4 -= ts; \
+ t3 += trh; \
+ tr -= t3; \
+ \
+ tk = -tk; \
+ \
+ /* 2485/8192 ~= Tan[3*Pi/32] ~= 0.303346683607342 */ \
+ tc -= (tj*2485 + 4096) >> 13; \
+ /* 18205/32768 ~= Sin[3*Pi/16] ~= 0.555570233019602 */ \
+ tj += (tc*18205 + 16384) >> 15; \
+ /* 2485/8192 ~= Tan[3*Pi/32] ~= 0.303346683607342 */ \
+ tc -= (tj*2485 + 4096) >> 13; \
+ /* 3227/32768 ~= Tan[Pi/32] ~= 0.09849140335716425 */ \
+ ts -= (t3*3227 + 16384) >> 15; \
+ /* 6393/32768 ~= Sin[Pi/16] ~= 0.19509032201612825 */ \
+ t3 += (ts*6393 + 16384) >> 15; \
+ /* 3227/32768 ~= Tan[Pi/32] ~= 0.09849140335716425 */ \
+ ts -= (t3*3227 + 16384) >> 15; \
+ /* 17515/32768 ~= Tan[5*Pi/32] ~= 0.5345111359507916 */ \
+ tk -= (tb*17515 + 16384) >> 15; \
+ /* 13623/16384 ~= Sin[5*Pi/16] ~= 0.8314696123025452 */ \
+ tb += (tk*13623 + 8192) >> 14; \
+ /* 17515/32768 ~= Tan[5*Pi/32] ~= 0.5345111359507916 */ \
+ tk -= (tb*17515 + 16384) >> 15; \
+ /* 6723/8192 ~= Tan[7*Pi/32] ~= 0.8206787908286602 */ \
+ t4 -= (tr*6723 + 4096) >> 13; \
+ /* 16069/16384 ~= Sin[7*Pi/16] ~= 0.9807852804032304 */ \
+ tr += (t4*16069 + 8192) >> 14; \
+ /* 6723/8192 ~= Tan[7*Pi/32] ~= 0.8206787908286602 */ \
+ t4 -= (tr*6723 + 4096) >> 13; \
+ \
+ t4 = -t4; \
+ \
+ tp += tm; \
+ tm -= DAALA_DCT_RSHIFT(tp, 1); \
+ t9 -= t6; \
+ t6 += DAALA_DCT_RSHIFT(t9, 1); \
+ th -= t1; \
+ t1 += DAALA_DCT_RSHIFT(th, 1); \
+ tu -= te; \
+ te += DAALA_DCT_RSHIFT(tu, 1); /* pass */ \
+ t5 -= tl; \
+ tl += DAALA_DCT_RSHIFT(t5, 1); \
+ ta += tq; \
+ tq -= DAALA_DCT_RSHIFT(ta, 1); \
+ td += tt; \
+ tt -= DAALA_DCT_RSHIFT(td, 1); \
+ t2 -= ti; \
+ ti += DAALA_DCT_RSHIFT(t2, 1); /* pass */ \
+ t7 += t8; \
+ t8 -= DAALA_DCT_RSHIFT(t7, 1); \
+ tn -= to; \
+ to += DAALA_DCT_RSHIFT(tn, 1); \
+ tf -= tv; \
+ tv += DAALA_DCT_RSHIFT(tf, 1); \
+ t0 += tg; \
+ tg -= DAALA_DCT_RSHIFT(t0, 1); /* pass */ \
+ tj -= t3; \
+ t3 += DAALA_DCT_RSHIFT(tj, 1); /* pass */ \
+ ts -= tc; \
+ tc += DAALA_DCT_RSHIFT(ts, 1); \
+ t4 -= tb; \
+ tb += DAALA_DCT_RSHIFT(t4, 1); /* pass */ \
+ tk -= tr; \
+ tr += DAALA_DCT_RSHIFT(tk, 1); \
+ \
+ t1 = -t1; \
+ t3 = -t3; \
+ t7 = -t7; \
+ t8 = -t8; \
+ tg = -tg; \
+ tm = -tm; \
+ to = -to; \
+ \
+ /* 14341/16384 ~= Tan[3*Pi/16] + Tan[Pi/8]/2 ~= 0.875285419105846 */ \
+ tm -= (t9*14341 + 8192) >> 14; \
+ /* 15137/16384 ~= Sin[3*Pi/8] ~= 0.923879532511287 */ \
+ t9 += (tm*15137 + 8192) >> 14; \
+ /* 4161/16384 ~= Tan[3*Pi/16] - Tan[Pi/8] ~= 0.253965075546204 */ \
+ tm -= (t9*4161 + 8192) >> 14; \
+ /* 4161/16384 ~= Tan[3*Pi/16] - Tan[Pi/8] ~= 0.253965075546204 */ \
+ tp -= (t6*4161 + 8192) >> 14; \
+ /* 15137/16384 ~= Sin[3*Pi/8] ~= 0.923879532511287 */ \
+ t6 += (tp*15137 + 8192) >> 14; \
+ /* 28681/32768 ~= Tan[3*Pi/16] + Tan[Pi/8]/2 ~= 0.875285419105846 */ \
+ tp -= (t6*28681 + 16384) >> 15; \
+ /* -19195/32768 ~= Tan[Pi/8] - Tan[Pi/4] ~= -0.585786437626905 */ \
+ th += (te*19195 + 16384) >> 15; \
+ /* 11585/16384 ~= Sin[Pi/4] ~= 0.707106781186548 */ \
+ te += (th*11585 + 8192) >> 14; \
+ /* 29957/32768 ~= Tan[Pi/8] + Tan[Pi/4]/2 ~= 0.914213562373095 */ \
+ th -= (te*29957 + 16384) >> 15; \
+ /* 14341/16384 ~= Tan[3*Pi/16] + Tan[Pi/8]/2 ~= 0.875285419105846 */ \
+ tq -= (t5*14341 + 8192) >> 14; \
+ /* 15137/16384 ~= Sin[3*Pi/8] ~= 0.923879532511287 */ \
+ t5 += (tq*15137 + 8192) >> 14; \
+ /* 4161/16384 ~= Tan[3*Pi/16] - Tan[Pi/8] ~= 0.253965075546204 */ \
+ tq -= (t5*4161 + 8192) >> 14; \
+ /* 3259/8192 ~= 2*Tan[Pi/16] ~= 0.397824734759316 */ \
+ ta -= (tl*3259 + 4096) >> 13; \
+ /* 3135/16384 ~= Sin[Pi/8]/2 ~= 0.1913417161825449 */ \
+ tl += (ta*3135 + 8192) >> 14; \
+ /* 3259/8192 ~= 2*Tan[Pi/16] ~= 0.397824734759316 */ \
+ ta -= (tl*3259 + 4096) >> 13; \
+ /* 7489/8192 ~= Tan[Pi/8] + Tan[Pi/4]/2 ~= 0.914213562373095 */ \
+ ti -= (td*7489 + 4096) >> 13; \
+ /* 11585/16384 ~= Sin[Pi/4] ~= 0.707106781186548 */ \
+ td += (ti*11585 + 8192) >> 14; \
+ /* -19195/32768 ~= Tan[Pi/8] - Tan[Pi/4] ~= -0.585786437626905 */ \
+ ti += (td*19195 + 16384) >> 15; \
+ /* 14341/16384 ~= Tan[3*Pi/16] + Tan[Pi/8]/2 ~= 0.875285419105846 */ \
+ to -= (t7*14341 + 8192) >> 14; \
+ /* 15137/16384 ~= Sin[3*Pi/8] ~= 0.923879532511287 */ \
+ t7 += (to*15137 + 8192) >> 14; \
+ /* 4161/16384 ~= Tan[3*Pi/16] - Tan[Pi/8] ~= 0.253965075546204 */ \
+ to -= (t7*4161 + 8192) >> 14; \
+ /* 4161/16384 ~= Tan[3*Pi/16] - Tan[Pi/8] ~= 0.253965075546204 */ \
+ tn -= (t8*4161 + 8192) >> 14; \
+ /* 15137/16384 ~= Sin[3*Pi/8] ~= 0.923879532511287 */ \
+ t8 += (tn*15137 + 8192) >> 14; \
+ /* 28681/32768 ~= Tan[3*Pi/16] + Tan[Pi/8]/2 ~= 0.875285419105846 */ \
+ tn -= (t8*28681 + 16384) >> 15; \
+ /* -19195/32768 ~= Tan[Pi/8] - Tan[Pi/4] ~= -0.585786437626905 */ \
+ tf += (tg*19195 + 16384) >> 15; \
+ /* 11585/16384 ~= Sin[Pi/4] ~= 0.707106781186548 */ \
+ tg += (tf*11585 + 8192) >> 14; \
+ /* 29957/32768 ~= Tan[Pi/8] + Tan[Pi/4]/2 ~= 0.914213562373095 */ \
+ tf -= (tg*29957 + 16384) >> 15; \
+ /* -19195/32768 ~= Tan[Pi/8] - Tan[Pi/4] ~= -0.585786437626905 */ \
+ tj += (tc*19195 + 16384) >> 15; \
+ /* 11585/16384 ~= Sin[Pi/4] ~= 0.707106781186548 */ \
+ tc += (tj*11585 + 8192) >> 14; \
+ /* 29957/32768 ~= Tan[Pi/8] + Tan[Pi/4]/2 ~= 0.914213562373095 */ \
+ tj -= (tc*29957 + 16384) >> 15; \
+ /* 13573/16384 ~= 2*Tan[Pi/8] ~= 0.828427124746190 */ \
+ tk += (tb*13573 + 8192) >> 14; \
+ /* 11585/32768 ~= Sin[Pi/4]/2 ~= 0.353553390593274 */ \
+ tb -= (tk*11585 + 16384) >> 15; \
+ /* 13573/16384 ~= 2*Tan[Pi/8] ~= 0.828427124746190 */ \
+ tk += (tb*13573 + 8192) >> 14; \
+ \
+ tf = -tf; \
+ \
+ } \
+ while (0)
+
+#define OD_IDCT_64(u0, uw, ug, uM, u8, uE, uo, uU, u4, uA, uk, uQ, uc, uI, \
+us, uY, u2, uy, ui, uO, ua, uG, uq, uW, u6, uC, um, uS, ue, uK, uu, u_, u1, \
+ux, uh, uN, u9, uF, up, uV, u5, uB, ul, uR, ud, uJ, ut, uZ, u3, uz, uj, uP, \
+ub, uH, ur, uX, u7, uD, un, uT, uf, uL, uv, u) \
+/* Embedded 64-point orthonormal Type-II iDCT. */ \
+/* (The original comment said "fDCT"; this macro is the inverse transform: \
+   it undoes the asymmetric 32-point halves, then the final butterfly \
+   recombination.) */ \
+do { \
+ int u1h; \
+ int u3h; \
+ int u5h; \
+ int u7h; \
+ int u9h; \
+ int ubh; \
+ int udh; \
+ int ufh; \
+ int uhh; \
+ int ujh; \
+ int ulh; \
+ int unh; \
+ int uph; \
+ int urh; \
+ int uth; \
+ int uvh; \
+ int uxh; \
+ int uzh; \
+ int uBh; \
+ int uDh; \
+ int uFh; \
+ int uHh; \
+ int uJh; \
+ int uLh; \
+ int uNh; \
+ int uPh; \
+ int uRh; \
+ int uTh; \
+ int uVh; \
+ int uXh; \
+ int uZh; \
+ int uh_; \
+ OD_IDST_32_ASYM(u, uL, uT, uD, uX, uH, uP, uz, uZ, uJ, uR, uB, uV, uF, \
+ uN, ux, u_, uK, uS, uC, uW, uG, uO, uy, uY, uI, uQ, uA, uU, uE, uM, uw); \
+ OD_IDCT_32_ASYM(u0, ug, u8, uo, u4, uk, uc, us, u2, ui, ua, uq, u6, um, \
+ ue, uu, u1, u1h, uh, uhh, u9, u9h, up, uph, u5, u5h, ul, ulh, ud, udh, \
+ ut, uth, u3, u3h, uj, ujh, ub, ubh, ur, urh, u7, u7h, un, unh, uf, ufh, \
+ uv, uvh); \
+ uh_ = DAALA_DCT_RSHIFT(u, 1); \
+ u0 += uh_; \
+ u = u0 - u; \
+ u_ = u1h - u_; \
+ u1 -= u_; \
+ uZh = DAALA_DCT_RSHIFT(uZ, 1); \
+ u2 += uZh; \
+ uZ = u2 - uZ; \
+ uY = u3h - uY; \
+ u3 -= uY; \
+ uXh = DAALA_DCT_RSHIFT(uX, 1); \
+ u4 += uXh; \
+ uX = u4 - uX; \
+ uW = u5h - uW; \
+ u5 -= uW; \
+ uVh = DAALA_DCT_RSHIFT(uV, 1); \
+ u6 += uVh; \
+ uV = u6 - uV; \
+ uU = u7h - uU; \
+ u7 -= uU; \
+ uTh = DAALA_DCT_RSHIFT(uT, 1); \
+ u8 += uTh; \
+ uT = u8 - uT; \
+ uS = u9h - uS; \
+ u9 -= uS; \
+ uRh = DAALA_DCT_RSHIFT(uR, 1); \
+ ua += uRh; \
+ uR = ua - uR; \
+ uQ = ubh - uQ; \
+ ub -= uQ; \
+ uPh = DAALA_DCT_RSHIFT(uP, 1); \
+ uc += uPh; \
+ uP = uc - uP; \
+ uO = udh - uO; \
+ ud -= uO; \
+ uNh = DAALA_DCT_RSHIFT(uN, 1); \
+ ue += uNh; \
+ uN = ue - uN; \
+ uM = ufh - uM; \
+ uf -= uM; \
+ uLh = DAALA_DCT_RSHIFT(uL, 1); \
+ ug += uLh; \
+ uL = ug - uL; \
+ uK = uhh - uK; \
+ uh -= uK; \
+ uJh = DAALA_DCT_RSHIFT(uJ, 1); \
+ ui += uJh; \
+ uJ = ui - uJ; \
+ uI = ujh - uI; \
+ uj -= uI; \
+ uHh = DAALA_DCT_RSHIFT(uH, 1); \
+ uk += uHh; \
+ uH = uk - uH; \
+ uG = ulh - uG; \
+ ul -= uG; \
+ uFh = DAALA_DCT_RSHIFT(uF, 1); \
+ um += uFh; \
+ uF = um - uF; \
+ uE = unh - uE; \
+ un -= uE; \
+ uDh = DAALA_DCT_RSHIFT(uD, 1); \
+ uo += uDh; \
+ uD = uo - uD; \
+ uC = uph - uC; \
+ up -= uC; \
+ uBh = DAALA_DCT_RSHIFT(uB, 1); \
+ uq += uBh; \
+ uB = uq - uB; \
+ uA = urh - uA; \
+ ur -= uA; \
+ uzh = DAALA_DCT_RSHIFT(uz, 1); \
+ us += uzh; \
+ uz = us - uz; \
+ uy = uth - uy; \
+ ut -= uy; \
+ uxh = DAALA_DCT_RSHIFT(ux, 1); \
+ uu += uxh; \
+ ux = uu - ux; \
+ uw = uvh - uw; \
+ uv -= uw; \
+ } while (0)
+
+/* 1-D 64-point inverse DCT: reads the 64 coefficients from y[] and writes
+ * the reconstructed samples to x[] with a stride of xstride elements.
+ * The coefficient load order below matches the bit-reversed/interleaved
+ * layout the OD_IDCT_64 macro expects. */
+static av_always_inline void idct_1D_64(pixel *x, int xstride, const pixel y[64])
+{
+ int t0 = y[0], tw = y[1], tg = y[2], tM = y[3], t8 = y[4], tE = y[5];
+ int to = y[6], tU = y[7], t4 = y[8], tA = y[9], tk = y[10], tQ = y[11];
+ int tc = y[12], tI = y[13], ts = y[14], tY = y[15], t2 = y[16], ty = y[17];
+ int ti = y[18], tO = y[19], ta = y[20], tG = y[21], tq = y[22], tW = y[23];
+ int t6 = y[24], tC = y[25], tm = y[26], tS = y[27], te = y[28], tK = y[29];
+ int tu = y[30], t_ = y[31], t1 = y[32], tx = y[33], th = y[34], tN = y[35];
+ int t9 = y[36], tF = y[37], tp = y[38], tV = y[39], t5 = y[40], tB = y[41];
+ int tl = y[42], tR = y[43], td = y[44], tJ = y[45], tt = y[46], tZ = y[47];
+ int t3 = y[48], tz = y[49], tj = y[50], tP = y[51], tb = y[52], tH = y[53];
+ int tr = y[54], tX = y[55], t7 = y[56], tD = y[57], tn = y[58], tT = y[59];
+ int tf = y[60], tL = y[61], tv = y[62], t = y[63];
+ OD_IDCT_64(t0, tw, tg, tM, t8, tE, to, tU, t4, tA, tk, tQ, tc, tI, ts, tY,
+ t2, ty, ti, tO, ta, tG, tq, tW, t6, tC, tm, tS, te, tK, tu, t_, t1, tx,
+ th, tN, t9, tF, tp, tV, t5, tB, tl, tR, td, tJ, tt, tZ, t3, tz, tj, tP,
+ tb, tH, tr, tX, t7, tD, tn, tT, tf, tL, tv, t);
+ /* Scatter the outputs in natural order; base-32 digit names t0..t9, ta..tz,
+  * tA..tZ, t_, t map to indices 0..63. */
+ x[0*xstride] = (pixel)t0;
+ x[1*xstride] = (pixel)t1;
+ x[2*xstride] = (pixel)t2;
+ x[3*xstride] = (pixel)t3;
+ x[4*xstride] = (pixel)t4;
+ x[5*xstride] = (pixel)t5;
+ x[6*xstride] = (pixel)t6;
+ x[7*xstride] = (pixel)t7;
+ x[8*xstride] = (pixel)t8;
+ x[9*xstride] = (pixel)t9;
+ x[10*xstride] = (pixel)ta;
+ x[11*xstride] = (pixel)tb;
+ x[12*xstride] = (pixel)tc;
+ x[13*xstride] = (pixel)td;
+ x[14*xstride] = (pixel)te;
+ x[15*xstride] = (pixel)tf;
+ x[16*xstride] = (pixel)tg;
+ x[17*xstride] = (pixel)th;
+ x[18*xstride] = (pixel)ti;
+ x[19*xstride] = (pixel)tj;
+ x[20*xstride] = (pixel)tk;
+ x[21*xstride] = (pixel)tl;
+ x[22*xstride] = (pixel)tm;
+ x[23*xstride] = (pixel)tn;
+ x[24*xstride] = (pixel)to;
+ x[25*xstride] = (pixel)tp;
+ x[26*xstride] = (pixel)tq;
+ x[27*xstride] = (pixel)tr;
+ x[28*xstride] = (pixel)ts;
+ x[29*xstride] = (pixel)tt;
+ x[30*xstride] = (pixel)tu;
+ x[31*xstride] = (pixel)tv;
+ x[32*xstride] = (pixel)tw;
+ x[33*xstride] = (pixel)tx;
+ x[34*xstride] = (pixel)ty;
+ x[35*xstride] = (pixel)tz;
+ x[36*xstride] = (pixel)tA;
+ x[37*xstride] = (pixel)tB;
+ x[38*xstride] = (pixel)tC;
+ x[39*xstride] = (pixel)tD;
+ x[40*xstride] = (pixel)tE;
+ x[41*xstride] = (pixel)tF;
+ x[42*xstride] = (pixel)tG;
+ x[43*xstride] = (pixel)tH;
+ x[44*xstride] = (pixel)tI;
+ x[45*xstride] = (pixel)tJ;
+ x[46*xstride] = (pixel)tK;
+ x[47*xstride] = (pixel)tL;
+ x[48*xstride] = (pixel)tM;
+ x[49*xstride] = (pixel)tN;
+ x[50*xstride] = (pixel)tO;
+ x[51*xstride] = (pixel)tP;
+ x[52*xstride] = (pixel)tQ;
+ x[53*xstride] = (pixel)tR;
+ x[54*xstride] = (pixel)tS;
+ x[55*xstride] = (pixel)tT;
+ x[56*xstride] = (pixel)tU;
+ x[57*xstride] = (pixel)tV;
+ x[58*xstride] = (pixel)tW;
+ x[59*xstride] = (pixel)tX;
+ x[60*xstride] = (pixel)tY;
+ x[61*xstride] = (pixel)tZ;
+ x[62*xstride] = (pixel)t_;
+ x[63*xstride] = (pixel)t;
+}
+
+/* Instantiate a BSIZE x BSIZE 2-D inverse DCT from the matching 1-D
+ * kernel: each row of src is transformed into the columns of a temporary
+ * block, then each row of the temporary (the original columns) is
+ * transformed into dst. Strides are in TYPE elements. */
+#define IDCT_DECL(BSIZE, TYPE) \
+static void idct_2D_##BSIZE(uint8_t *_dst, int dstride, const uint8_t *_src, int istride) \
+{\
+ int i; \
+ TYPE tmp[BSIZE*BSIZE]; \
+ TYPE *dst = (TYPE *)_dst; \
+ const TYPE *src = (const TYPE *)_src; \
+ for (i = 0; i < BSIZE; i++) \
+ idct_1D_##BSIZE(tmp + i, BSIZE, src + istride*i); \
+ for (i = 0; i < BSIZE; i++) \
+ idct_1D_##BSIZE(dst + i, dstride, tmp + BSIZE*i); \
+}
+
+IDCT_DECL(4, pixel)
+IDCT_DECL(8, pixel)
+IDCT_DECL(16, pixel)
+IDCT_DECL(32, pixel)
+IDCT_DECL(64, pixel)
+
+/* Lifting coefficients for the 4-tap lapped pre/post filter, in Q6
+ * fixed point (64 == 1.0). */
+#define FILTER_PARAM_4_3 (33)
+#define FILTER_PARAM_4_2 (-15)
+#define FILTER_PARAM_4_1 (75)
+#define FILTER_PARAM_4_0 (85)
+
+/* 4-tap lapped prefilter applied across a block edge: butterflies the four
+ * samples, scales/lifts the high-pass pair with the Q6 FILTER_PARAM_4_*
+ * coefficients, and butterflies back. Exactly inverted by postfilter_4x4.
+ * dst and src may alias (callers pass the same buffer). */
+static void prefilter_4x4(pixel *dst, const pixel *src)
+{
+ int t[4];
+ /* Forward butterfly: differences then half-compensated sums. */
+ t[3] = src[0]-src[3];
+ t[2] = src[1]-src[2];
+ t[1] = src[1]-(t[2]>>1);
+ t[0] = src[0]-(t[3]>>1);
+ # if FILTER_PARAM_4_0 != 64
+ /* Scale high-pass terms by the Q6 gains; the `-t[x]>>(32-1)&1` term adds
+    1 for positive values, biasing the arithmetic shift so the postfilter's
+    division inverts it exactly. NOTE(review): relies on arithmetic >> of
+    negative ints (implementation-defined in C) — confirm this matches the
+    reference daala behavior on all target compilers. */
+ t[2] = t[2]*FILTER_PARAM_4_0>>6;
+ t[2] += -t[2]>>(32-1)&1;
+ # endif
+ # if FILTER_PARAM_4_1 != 64
+ t[3] = t[3]*FILTER_PARAM_4_1>>6;
+ t[3] += -t[3]>>(32-1)&1;
+ # endif
+ /* Rotation via two lifting steps between the high-pass terms. */
+ t[3] += (t[2]*FILTER_PARAM_4_2+32)>>6;
+ t[2] += (t[3]*FILTER_PARAM_4_3+32)>>6;
+ /* Inverse butterfly back to the sample domain. */
+ t[0] += t[3]>>1;
+ dst[0] = (pixel)t[0];
+ t[1] += t[2]>>1;
+ dst[1] = (pixel)t[1];
+ dst[2] = (pixel)(t[1]-t[2]);
+ dst[3] = (pixel)(t[0]-t[3]);
+}
+
+/* Inverse of prefilter_4x4: undoes the lifting steps in reverse order and
+ * divides out the Q6 gains (exact because the prefilter biased its shifts).
+ * dst and src may alias. */
+static void postfilter_4x4(pixel *dst, const pixel *src)
+{
+ int t[4];
+ /* Forward butterfly (same as the prefilter's first stage). */
+ t[3] = src[0]-src[3];
+ t[2] = src[1]-src[2];
+ t[1] = src[1]-(t[2]>>1);
+ t[0] = src[0]-(t[3]>>1);
+ /* Undo the lifting rotation, in the opposite order to the prefilter. */
+ t[2] -= (t[3]*FILTER_PARAM_4_3+32)>>6;
+ t[3] -= (t[2]*FILTER_PARAM_4_2+32)>>6;
+ #if FILTER_PARAM_4_1 != 64
+ /* Integer division inverts the prefilter's biased Q6 multiply exactly. */
+ t[3] = t[3]*(1 << 6)/FILTER_PARAM_4_1;
+ #endif
+ #if FILTER_PARAM_4_0 != 64
+ t[2] = t[2]*(1 << 6)/FILTER_PARAM_4_0;
+ #endif
+ /* Inverse butterfly back to samples. */
+ t[0] += t[3]>>1;
+ dst[0] = (pixel)t[0];
+ t[1] += t[2]>>1;
+ dst[1] = (pixel)t[1];
+ dst[2] = (pixel)(t[1]-t[2]);
+ dst[3] = (pixel)(t[0]-t[3]);
+}
+
+/* Apply the 4-tap lapped prefilter across the internal split edges of a
+ * block of size (4 << bs): across the horizontal mid-edge when hfilter is
+ * set (filtering vertically through each column) and across the vertical
+ * mid-edge when vfilter is set (filtering each row in place). */
+static void daala_split_prefilter(uint8_t *_blk, int stride, int bs,
+ int hfilter, int vfilter)
+{
+ const int f = 0;
+ const int len = 4 << bs;
+ const int taps = 4 << f;
+ const int off = (2 << bs) - (2 << f);
+ pixel *blk = (pixel *)_blk;
+ int row, col, k;
+ if (hfilter) {
+ pixel *edge = blk + off*stride;
+ for (col = 0; col < len; col++) {
+ pixel line[4 << DAALA_NBSIZES];
+ /* Gather the column samples straddling the edge, filter, scatter. */
+ for (k = 0; k < taps; k++)
+ line[k] = edge[stride*k + col];
+ prefilter_4x4(line, line);
+ for (k = 0; k < taps; k++)
+ edge[stride*k + col] = line[k];
+ }
+ }
+ if (vfilter) {
+ pixel *edge = blk + off;
+ for (row = 0; row < len; row++)
+ prefilter_4x4(edge + row*stride, edge + row*stride);
+ }
+}
+
+
+/* Inverse of daala_split_prefilter: applies postfilter_4x4 across the same
+ * internal split edges, with the vertical pass done before the horizontal
+ * one (the reverse order of the prefilter). */
+static void daala_split_postfilter(uint8_t *_blk, int stride, int bs,
+ int hfilter, int vfilter)
+{
+ const int f = 0;
+ const int len = 4 << bs;
+ const int taps = 4 << f;
+ const int off = (2 << bs) - (2 << f);
+ pixel *blk = (pixel *)_blk;
+ int row, col, k;
+ if (vfilter) {
+ pixel *edge = blk + off;
+ for (row = 0; row < len; row++)
+ postfilter_4x4(edge + row*stride, edge + row*stride);
+ }
+ if (hfilter) {
+ pixel *edge = blk + off*stride;
+ for (col = 0; col < len; col++) {
+ pixel line[4 << DAALA_NBSIZES];
+ /* Gather the column samples straddling the edge, filter, scatter. */
+ for (k = 0; k < taps; k++)
+ line[k] = edge[stride*k + col];
+ postfilter_4x4(line, line);
+ for (k = 0; k < taps; k++)
+ edge[stride*k + col] = line[k];
+ }
+ }
+}
+
+/* Apply the lapped postfilter across every internal superblock boundary of
+ * the frame: first the vertical boundaries between the nhsb columns of
+ * superblocks (filtering each row that crosses them), then the horizontal
+ * boundaries between the nvsb rows (filtering each column). xdec/ydec are
+ * the chroma decimation shifts of the plane. */
+static void daala_frame_postfilter(uint8_t *_blk, int stride, int nhsb,
+ int nvsb, int xdec, int ydec)
+{
+ int sbx;
+ int sby;
+ int i;
+ int j;
+ int f;
+ pixel *c;
+ pixel *blk = (pixel *)_blk;
+ f = 0;
+ /* Vertical superblock edges: c starts 2 samples left of the first edge. */
+ c = blk + (DAALA_BSIZE_MAX >> ydec) - (2 << f);
+ for (sbx = 1; sbx < nhsb; sbx++) {
+ for (i = 0; i < nvsb << DAALA_LOG_BSIZE_MAX >> ydec; i++) {
+ postfilter_4x4(c + i*stride, c + i*stride);
+ }
+ c += DAALA_BSIZE_MAX >> xdec;
+ }
+ /* Horizontal superblock edges: gather each crossing column into t[],
+  * filter, and scatter back. */
+ c = blk + ((DAALA_BSIZE_MAX >> ydec) - (2 << f))*stride;
+ for (sby = 1; sby < nvsb; sby++) {
+ for (j = 0; j < nhsb << DAALA_LOG_BSIZE_MAX >> xdec; j++) {
+ int k;
+ pixel t[4 << DAALA_NBSIZES];
+ for (k = 0; k < 4 << f; k++) t[k] = c[stride*k + j];
+ postfilter_4x4(t, t);
+ for (k = 0; k < 4 << f; k++) c[stride*k + j] = t[k];
+ }
+ c += DAALA_BSIZE_MAX*stride >> ydec;
+ }
+}
+
+/* Double the horizontal frequency resolution of an n x (n/2) pair of
+ * sub-bands: combines each low-frequency sample with its partner dx
+ * columns away via a reversible Haar butterfly and interleaves the
+ * results (alternating which output slot gets the low band). */
+static av_always_inline void daala_sf_hor_up(pixel *dst, int dstride,
+ const pixel *src, int istride,
+ int dx, int n)
+{
+ int row, col;
+ for (row = 0; row < n; row++) {
+ const pixel *s = src + row*istride;
+ pixel *d = dst + row*dstride;
+ for (col = 0; col < n >> 1; col++) {
+ const int swap = col & 1;
+ pixel low = s[col];
+ pixel high = low - s[col + dx];
+ low -= DAALA_DCT_RSHIFT(high, 1);
+ d[2*col + swap] = low;
+ d[2*col + 1 - swap] = high;
+ }
+ }
+}
+
+/* Double the vertical frequency resolution of an (n/2) x n pair of
+ * sub-bands: combines each low-frequency sample with its partner dy rows
+ * below via a reversible Haar butterfly and interleaves the results
+ * (alternating which output row gets the low band). */
+static av_always_inline void daala_sf_ver_up(pixel *dst, int dstride,
+ const pixel *src, int istride,
+ int dy, int n)
+{
+ int row, col;
+ for (row = 0; row < n >> 1; row++) {
+ const int swap = row & 1;
+ const pixel *s0 = src + row*istride;
+ const pixel *s1 = src + (row + dy)*istride;
+ pixel *dlo = dst + (2*row + swap)*dstride;
+ pixel *dhi = dst + (2*row + 1 - swap)*dstride;
+ for (col = 0; col < n; col++) {
+ pixel low = s0[col];
+ pixel high = low - s1[col];
+ low -= DAALA_DCT_RSHIFT(high, 1);
+ dlo[col] = low;
+ dhi[col] = high;
+ }
+ }
+}
+
+/* Double both the vertical and horizontal frequency resolution: each
+ * quad of sub-band samples (LL, LH at +dx, HL at +dy, HH at +dx+dy) is
+ * recombined with the 2x2 Haar kernel and written interleaved, with the
+ * low-band slot alternating per row and per column. */
+static av_always_inline void daala_sf_ful_up(pixel *dst, int dstride,
+ const pixel *src, int istride,
+ int dx, int dy, int n)
+{
+ int row, col;
+ for (row = 0; row < n >> 1; row++) {
+ const int vswap = row & 1;
+ pixel *dlo = dst + (2*row + vswap)*dstride;
+ pixel *dhi = dst + (2*row + 1 - vswap)*dstride;
+ for (col = 0; col < n >> 1; col++) {
+ const int hswap = col & 1;
+ pixel ll = src[row*istride + col];
+ pixel lh = src[row*istride + col + dx];
+ pixel hl = src[(row + dy)*istride + col];
+ pixel hh = src[(row + dy)*istride + col + dx];
+ daala_haar_kern(&ll, &hl, &lh, &hh);
+ dlo[2*col + hswap] = ll;
+ dlo[2*col + 1 - hswap] = lh;
+ dhi[2*col + hswap] = hl;
+ dhi[2*col + 1 - hswap] = hh;
+ }
+ }
+}
+
+/* Chroma-from-luma resampling: upsample the luma reference into the chroma
+ * plane's resolution. At the smallest chroma block size with subsampling,
+ * the appropriate frequency-doubling routine is used (and for 4:2:0 the
+ * 4x4 low-frequency corner is rescaled with the CfL table); otherwise the
+ * block is copied unchanged. */
+static void daala_cfl_resample(uint8_t *_dst, int dstride, const uint8_t *_src,
+ int istride, int xdec, int ydec, int bs, int chroma_bs)
+{
+ const int n = 4 << bs;
+ pixel *dst = (pixel *)_dst;
+ pixel *src = (pixel *)_src;
+ int i, j;
+ if (chroma_bs || !(xdec || ydec)) {
+ /* No subsampling to undo: straight copy. */
+ for (i = 0; i < n; i++)
+ for (j = 0; j < n; j++)
+ dst[i*dstride + j] = src[i*istride + j];
+ return;
+ }
+ if (xdec && ydec) {
+ daala_sf_ful_up(dst, dstride, src, istride, n, n, n);
+ /* Rescale the 4x4 LF corner with the CfL gains (Q7 rounding). */
+ for (i = 0; i < 4; i++)
+ for (j = 0; j < 4; j++)
+ dst[i*dstride + j] = (daaladsp_cfl_scale[j][i]*dst[i*dstride + j] + 64) >> 7;
+ } else if (xdec) {
+ daala_sf_hor_up(dst, dstride, src, istride, n, n);
+ } else {
+ daala_sf_ver_up(dst, dstride, src, istride, n, n);
+ }
+}
+
+/* Horizontal/vertical intra prediction: copies the first row of the block
+ * from the neighbor above and the first column from the neighbor to the
+ * left (when each exists and has the same block size), then uses the
+ * larger of the two summed neighbor magnitudes (g1 = top, g2 = left) to
+ * decide which neighbor also supplies coefficients 1..3 of the first
+ * row/column. Operates on transform-domain data via the ref pointer. */
+static void daala_intra_prediction(uint8_t *_pred, const uint8_t *_ref,
+ int stride, int x, int y,
+ enum DaalaBsize *bsize, int bstride,
+ enum DaalaBsize bs)
+{
+ int i;
+ int64_t g1 = 0, g2 = 0;
+ const int n = 1 << (bs + DAALA_LOG_BSIZE0);
+ /* Neighbors are usable only when present and of the same block size. */
+ const int t = y && DAALA_BSIZE4x4(bsize, bstride, x, y - 1) == bs;
+ const int l = x && DAALA_BSIZE4x4(bsize, bstride, x - 1, y) == bs;
+ pixel *pred = (pixel *)_pred;
+ const pixel *ref = (const pixel *)_ref;
+ /* Gradient estimates from the first few AC coefficients of each
+  * neighbor. NOTE(review): only i = 1..3 are summed — presumably the DC
+  * term is deliberately excluded; confirm against the reference. */
+ if (t) {
+ for (i = 1; i < 4; i++)
+ g1 += abs(ref[-n*stride + i]);
+ }
+ if (l) {
+ for (i = 1; i < 4; i++)
+ g2 += abs(ref[-n + i*stride]);
+ }
+ /* Coefficients 4..n-1 of the first row/column always come from the
+  * matching neighbor when it exists. */
+ if (t) {
+ for (i = 4; i < n; i++)
+ pred[i] = ref[-n*stride + i];
+ }
+ if (l) {
+ for (i = 4; i < n; i++)
+ pred[i*n] = ref[-n + i*stride];
+ }
+ /* Coefficients 1..3 come from whichever neighbor has the larger
+  * gradient (top wins ties only when g1 > g2). */
+ if (g1 > g2) {
+ if (t)
+ for (i = 1; i < 4; i++)
+ pred[i] = ref[-n*stride + i];
+ } else if (l) {
+ for (i = 1; i < 4; i++)
+ pred[i*n] = ref[-n + i*stride];
+ }
+}
+
+/* Initialize the DSP function table with the C reference implementations.
+ * NOTE(review): bit_depth is currently unused here — presumably reserved
+ * for high-bit-depth variants; confirm before removing. Always returns 0. */
+int daaladsp_init(DaalaDSP *d, int bit_depth)
+{
+
+ d->cfl = daala_cfl_resample;
+
+ d->intrapred = daala_intra_prediction;
+
+ d->pre_split_filter = daala_split_prefilter;
+ d->post_split_filter = daala_split_postfilter;
+
+ d->frame_postfilter = daala_frame_postfilter;
+
+ /* Transforms */
+ d->idwt = daala_haar_iwt;
+ d->idct[DAALA_BLOCKSIZE_4x4] = idct_2D_4;
+ d->idct[DAALA_BLOCKSIZE_8x8] = idct_2D_8;
+ d->idct[DAALA_BLOCKSIZE_16x16] = idct_2D_16;
+ d->idct[DAALA_BLOCKSIZE_32x32] = idct_2D_32;
+ d->idct[DAALA_BLOCKSIZE_64x64] = idct_2D_64;
+
+ return 0;
+}
diff --git a/libavcodec/daaladsp.h b/libavcodec/daaladsp.h
new file mode 100644
index 0000000..6c579d1
--- /dev/null
+++ b/libavcodec/daaladsp.h
@@ -0,0 +1,78 @@
+/*
+ * Daala video decoder
+ *
+ * Copyright (C) 2015 Rostislav Pehlivanov <atomnuker at gmail.com>
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#ifndef AVCODEC_DAALADSP_H
+#define AVCODEC_DAALADSP_H
+
+#include "daala.h"
+
+/* Haar transform (external), internally in daaladct.c it's manually inlined */
+static av_always_inline void daala_haar_kern(dctcoef *a, dctcoef *b, /* in-place Haar butterfly on four coefficients; the lifting-step order below is load-bearing */
+ dctcoef *c, dctcoef *d)
+{
+ dctcoef tmp;
+ *a += *c; /* lifting steps: the shared (*a - *d) >> 1 term makes the pair of updates exactly invertible */
+ *d -= *b;
+ tmp = (*a - *d) >> 1;
+ *b = tmp - *b;
+ *c = tmp - *c;
+ *a -= *b;
+ *d += *c;
+}
+
+typedef struct DaalaDSP {
+
+ /* Intra-block prediction (C reference: daala_intra_prediction()) */
+ void (*intrapred)(uint8_t *pred, const uint8_t *ref,
+ int stride, int x, int y,
+ enum DaalaBsize *bsize, int bstride,
+ enum DaalaBsize bs);
+
+ /* Chroma from luma: resample luma coeffs for chroma prediction (xdec/ydec = subsampling) */
+ void (*cfl)(uint8_t *dst, int dstride, const uint8_t *src,
+ int istride, int xdec, int ydec, int bs, int chroma_bs);
+
+ /* Pre-split filter, applied across block edges before the transform split */
+ void (*pre_split_filter)(uint8_t *blk, int stride, int bs,
+ int hfilter, int vfilter);
+
+ /* Post-split filter: inverse of the pre-split filter, run on reconstruction */
+ void (*post_split_filter)(uint8_t *blk, int stride, int bs,
+ int hfilter, int vfilter);
+
+ /* Frame postfilter, run over nhsb x nvsb superblocks after decoding */
+ void (*frame_postfilter)(uint8_t *blk, int stride, int nhsb,
+ int nvsb, int xdec, int ydec);
+
+ /* IDWT for the Haar wavelets (ln = log2 of the transform size) */
+ void (*idwt)(uint8_t *dst, const int dst_stride, const uint8_t *src,
+ const int src_stride, const int ln);
+
+ /* IDCT transforms, one entry per block size (4x4 .. 64x64) */
+ void (*(idct)[DAALA_NBSIZES])(uint8_t *dst, int istride, const uint8_t *src,
+ int ostride);
+
+} DaalaDSP;
+
+int daaladsp_init(DaalaDSP *d, int bit_depth);
+
+#endif /* AVCODEC_DAALADSP_H */
diff --git a/libavcodec/daalatab.c b/libavcodec/daalatab.c
new file mode 100644
index 0000000..4fa255d
--- /dev/null
+++ b/libavcodec/daalatab.c
@@ -0,0 +1,1544 @@
+/*
+ * Copyright 2001-2015 Xiph.Org and contributors.
+ * Copyright 2015 Rostislav Pehlivanov <atomnuker at gmail.com>
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+ * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "daalatab.h"
+
+const struct DaalaPixFmts daala_valid_formats[] = { /* pixel formats the decoder can output */
+ {AV_PIX_FMT_YUV420P, 3, 8, 1, {{0,0}, {1,1}, {1,1}, {0,0}}}, /* fields appear to be: fmt, planes, depth, ?, per-plane {x,y} decimation — TODO confirm against struct DaalaPixFmts */
+ {AV_PIX_FMT_YUV444P, 3, 8, 1, {{0,0}, {0,0}, {0,0}, {0,0}}}
+};
+const int daala_valid_formats_num = FF_ARRAY_ELEMS(daala_valid_formats);
+
+/* Haar "quantization matrix" for each decomposition level */
+const int daala_haar_qm[][DAALA_LOG_BSIZE_MAX] = {
+ {16, 16, 16, 16, 24, 32}, /* horizontal/vertical direction. */
+ {16, 16, 16, 24, 32, 48}, /* "diagonal" direction. */
+};
+
+/* Haar basis scaling compensation, [0] - x,y; [1] - diag */
+const int32_t daala_dc_comp[][2] = { {21, 25}, {18, 20}, {17, 18}, {17, 17} };
+
+/* Flat (e.g. PSNR-tuned) quantization matrix: the same weight (16) for all 64 positions */
+const int16_t daala_qm_flat[] = {
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16
+};
+
+/* HVS (human visual system) quantization matrix: weights increase with distance from the DC corner */
+static const int16_t daala_qm_hvs[] = {
+ 16, 16, 18, 21, 24, 28, 32, 36,
+ 16, 17, 20, 21, 24, 27, 31, 35,
+ 18, 20, 24, 25, 27, 31, 33, 38,
+ 21, 21, 25, 28, 30, 34, 37, 42,
+ 24, 24, 27, 30, 34, 38, 43, 49,
+ 28, 27, 31, 34, 38, 44, 50, 58,
+ 32, 31, 33, 37, 43, 50, 58, 68,
+ 36, 35, 38, 42, 49, 58, 68, 78
+};
+
+const int16_t *const daala_qmatrices[] = { /* selectable quantization matrices: 0 = flat, 1 = HVS */
+ daala_qm_flat,
+ daala_qm_hvs
+};
+
+/* Chroma-from-luma scaling: per-position gains in Q7, applied as (scale*x + 64) >> 7 in daala_cfl_resample() */
+const uint16_t daaladsp_cfl_scale[4][4] = {
+ { 128, 128, 100, 36 },
+ { 128, 80, 71, 35 },
+ { 100, 71, 35, 31 },
+ { 36, 35, 31, 18 },
+};
+
+/* Generator: "trunc(e(((coded_quantizer)-6.235)*.10989525)*(1<<4))"
+ * Read the comment in quantizers.c, OD_CODED_QUANTIZER_MAP_Q4 of libdaala */
+const int daala_quant_codemap[] = { /* coded quantizer index -> quantizer value; trailing comments give each row's first coded index */
+ 0x0000, /* 0 (lossless) */
+ 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000F, /* 1 */
+ 0x0011, 0x0013, 0x0015, 0x0018, 0x001B, 0x001E, /* 7 */
+ 0x0021, 0x0024, 0x0029, 0x002E, 0x0034, 0x003A, /* 13 */
+ 0x0041, 0x0048, 0x0051, 0x005A, 0x0064, 0x0070, /* 19 */
+ 0x007D, 0x008C, 0x009C, 0x00AE, 0x00C3, 0x00D9, /* 25 */
+ 0x00F3, 0x010F, 0x012F, 0x0152, 0x0179, 0x01A5, /* 31 */
+ 0x01D6, 0x020D, 0x0249, 0x028E, 0x02DA, 0x032E, /* 37 */
+ 0x038D, 0x03F7, 0x046D, 0x04F0, 0x0583, 0x0627, /* 43 */
+ 0x06DE, 0x07AA, 0x088E, 0x098D, 0x0AA9, 0x0BE6, /* 49 */
+ 0x0D48, 0x0ED3, 0x108C, 0x1278, 0x149D, 0x1702, /* 55 */
+ 0x19AE, 0x1CAA, 0x1FFF /* 61 */
+};
+const int daala_quant_codemap_size = FF_ARRAY_ELEMS(daala_quant_codemap);
+
+/* Probability distribution functions */
+const ent_rng daalaent_cdf_tab[] = { /* concatenated uniform CDFs for alphabet sizes 2..16: entry k of the N-symbol CDF is round(32768*k/N), each sub-table ending in 32768 */
+ 16384, 32768, 10923, 21845, 32768, 8192, 16384, 24576, 32768, 6554, 13107,
+ 19661, 26214, 32768, 5461, 10923, 16384, 21845, 27307, 32768, 4681, 9362,
+ 14043, 18725, 23406, 28087, 32768, 4096, 8192, 12288, 16384, 20480, 24576,
+ 28672, 32768, 3641, 7282, 10923, 14564, 18204, 21845, 25486, 29127, 32768,
+ 3277, 6554, 9830, 13107, 16384, 19661, 22938, 26214, 29491, 32768, 2979,
+ 5958, 8937, 11916, 14895, 17873, 20852, 23831, 26810, 29789, 32768, 2731,
+ 5461, 8192, 10923, 13653, 16384, 19115, 21845, 24576, 27307, 30037, 32768,
+ 2521, 5041, 7562, 10082, 12603, 15124, 17644, 20165, 22686, 25206, 27727,
+ 30247, 32768, 2341, 4681, 7022, 9362, 11703, 14043, 16384, 18725, 21065, /* was 341: first entry of the 14-symbol CDF must be round(32768/14) = 2341 */
+ 23406, 25746, 28087, 30427, 32768, 2185, 4369, 6554, 8738, 10923, 13107,
+ 15292, 17476, 19661, 21845, 24030, 26214, 28399, 30583, 32768, 2048, 4096,
+ 6144, 8192, 10240, 12288, 14336, 16384, 18432, 20480, 22528, 24576, 26624,
+ 28672, 30720, 32768
+};
+
+/* TODO: consider generating this table at runtime instead, if doing so is cheap enough. */
+const ent_rng daalaent_cdf_exp_tab[][16] = {
+ {32753,32754,32755,32756,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {32499,32753,32755,32756,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {32243,32747,32755,32756,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {31987,32737,32755,32756,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {31732,32724,32755,32756,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {31476,32706,32754,32756,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {31220,32684,32753,32756,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {30964,32658,32751,32756,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {30708,32628,32748,32756,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {30452,32594,32745,32756,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {30198,32558,32742,32756,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {29941,32515,32736,32755,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {29686,32470,32731,32755,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {29429,32419,32723,32754,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {29174,32366,32715,32753,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {28918,32308,32705,32752,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {28662,32246,32694,32750,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {28406,32180,32681,32748,32757,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {28150,32110,32667,32745,32756,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {27894,32036,32651,32742,32756,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {27639,31959,32634,32739,32755,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {27383,31877,32614,32735,32755,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {27126,31790,32592,32730,32754,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {26871,31701,32569,32725,32753,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {26615,31607,32543,32719,32752,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {26361,31511,32517,32713,32751,32758,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {26104,31408,32485,32704,32748,32757,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {25848,31302,32452,32695,32746,32757,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {25591,31191,32416,32684,32743,32756,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {25336,31078,32379,32674,32741,32756,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {25080,30960,32338,32661,32737,32755,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {24824,30838,32295,32648,32733,32754,32759,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {24568,30712,32248,32632,32728,32752,32758,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {24313,30583,32199,32616,32723,32751,32758,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {24057,30449,32147,32598,32718,32750,32758,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {23801,30311,32091,32578,32711,32747,32757,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {23546,30170,32033,32557,32704,32745,32757,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {23288,30022,31969,32532,32695,32742,32756,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {23033,29873,31904,32507,32686,32739,32755,32760,32761,32762,32763,32764,32765,32766,32767,32768},
+ {22778,29720,31835,32479,32675,32735,32753,32759,32761,32762,32763,32764,32765,32766,32767,32768},
+ {22521,29561,31761,32449,32664,32731,32752,32759,32761,32762,32763,32764,32765,32766,32767,32768},
+ {22267,29401,31686,32418,32652,32727,32751,32759,32761,32762,32763,32764,32765,32766,32767,32768},
+ {22011,29235,31605,32383,32638,32722,32749,32758,32761,32762,32763,32764,32765,32766,32767,32768},
+ {21754,29064,31520,32345,32622,32715,32746,32757,32761,32762,32763,32764,32765,32766,32767,32768},
+ {21501,28893,31434,32307,32607,32710,32745,32757,32761,32762,32763,32764,32765,32766,32767,32768},
+ {21243,28713,31339,32262,32587,32701,32741,32755,32760,32762,32763,32764,32765,32766,32767,32768},
+ {20988,28532,31243,32217,32567,32693,32738,32754,32760,32762,32763,32764,32765,32766,32767,32768},
+ {20730,28344,31140,32167,32544,32682,32733,32752,32759,32762,32763,32764,32765,32766,32767,32768},
+ {20476,28156,31036,32116,32521,32673,32730,32751,32759,32762,32763,32764,32765,32766,32767,32768},
+ {20220,27962,30926,32061,32495,32661,32725,32749,32758,32762,32763,32764,32765,32766,32767,32768},
+ {19963,27763,30810,32000,32465,32647,32718,32746,32757,32761,32763,32764,32765,32766,32767,32768},
+ {19708,27562,30691,31938,32435,32633,32712,32743,32756,32761,32763,32764,32765,32766,32767,32768},
+ {19454,27358,30569,31873,32403,32618,32705,32741,32755,32761,32763,32764,32765,32766,32767,32768},
+ {19196,27146,30438,31801,32365,32599,32696,32736,32753,32760,32763,32764,32765,32766,32767,32768},
+ {18942,26934,30306,31728,32328,32581,32688,32733,32752,32760,32763,32764,32765,32766,32767,32768},
+ {18684,26714,30164,31647,32284,32558,32676,32727,32749,32758,32762,32764,32765,32766,32767,32768},
+ {18429,26493,30021,31565,32240,32535,32664,32721,32746,32757,32762,32764,32765,32766,32767,32768},
+ {18174,26268,29872,31477,32192,32510,32652,32715,32743,32756,32762,32764,32765,32766,32767,32768},
+ {17920,26040,29719,31386,32141,32483,32638,32708,32740,32754,32761,32764,32765,32766,32767,32768},
+ {17661,25803,29556,31286,32083,32451,32620,32698,32734,32751,32759,32763,32765,32766,32767,32768},
+ {17406,25566,29391,31184,32024,32418,32603,32690,32731,32750,32759,32763,32765,32766,32767,32768},
+ {17151,25325,29220,31076,31961,32383,32584,32680,32726,32748,32758,32763,32765,32766,32767,32768},
+ {16896,25080,29044,30964,31894,32344,32562,32668,32719,32744,32756,32762,32765,32766,32767,32768},
+ {16639,24829,28860,30844,31821,32302,32539,32655,32712,32740,32754,32761,32764,32766,32767,32768},
+ {16384,24576,28672,30720,31744,32256,32512,32640,32704,32736,32752,32760,32764,32766,32767,32768},
+ {16130,24320,28479,30591,31663,32208,32485,32625,32696,32732,32750,32759,32764,32766,32767,32768},
+ {15872,24056,28276,30452,31574,32152,32450,32604,32683,32724,32745,32756,32762,32765,32766,32768},
+ {15615,23789,28068,30308,31480,32094,32415,32583,32671,32717,32741,32754,32761,32764,32766,32768},
+ {15361,23521,27856,30159,31382,32032,32377,32560,32657,32709,32737,32752,32760,32764,32766,32768},
+ {15103,23245,27634,30000,31275,31963,32334,32534,32642,32700,32731,32748,32757,32762,32765,32768},
+ {14848,22968,27409,29837,31165,31891,32288,32505,32624,32689,32725,32744,32755,32761,32764,32768},
+ {14592,22686,27176,29666,31047,31813,32238,32474,32605,32678,32718,32740,32752,32759,32763,32768},
+ {14336,22400,26936,29488,30923,31730,32184,32439,32583,32664,32709,32735,32749,32757,32762,32768},
+ {14079,22109,26689,29301,30791,31641,32125,32401,32559,32649,32700,32729,32746,32756,32761,32768},
+ {13825,21817,26437,29108,30652,31545,32061,32359,32532,32632,32690,32723,32742,32753,32759,32768},
+ {13568,21518,26176,28905,30504,31441,31990,32312,32501,32611,32676,32714,32736,32749,32757,32768},
+ {13314,21218,25911,28697,30351,31333,31916,32262,32468,32590,32662,32705,32731,32746,32755,32768},
+ {13054,20908,25633,28475,30185,31214,31833,32205,32429,32564,32645,32694,32723,32741,32752,32768},
+ {12803,20603,25356,28252,30017,31093,31748,32147,32390,32538,32628,32683,32717,32737,32749,32768},
+ {12544,20286,25064,28013,29833,30956,31649,32077,32341,32504,32605,32667,32705,32729,32744,32768},
+ {12288,19968,24768,27768,29643,30815,31547,32005,32291,32470,32582,32652,32696,32723,32740,32768},
+ {12033,19647,24465,27514,29443,30664,31437,31926,32235,32431,32555,32633,32683,32714,32734,32768},
+ {11777,19321,24154,27250,29233,30504,31318,31839,32173,32387,32524,32612,32668,32704,32727,32768},
+ {11521,18991,23835,26976,29013,30334,31190,31745,32105,32338,32489,32587,32651,32692,32719,32768},
+ {11265,18657,23508,26691,28780,30151,31051,31641,32028,32282,32449,32559,32631,32678,32709,32768},
+ {11006,18316,23170,26394,28535,29957,30901,31528,31944,32220,32404,32526,32607,32661,32697,32768},
+ {10752,17976,22830,26091,28282,29754,30743,31408,31854,32154,32356,32491,32582,32643,32684,32768},
+ {10496,17630,22479,25775,28015,29538,30573,31276,31754,32079,32300,32450,32552,32621,32668,32768},
+ {10240,17280,22120,25448,27736,29309,30390,31133,31644,31995,32237,32403,32517,32595,32649,32768},
+ { 9984,16926,21753,25109,27443,29066,30194,30978,31523,31902,32166,32349,32476,32565,32627,32768},
+ { 9728,16568,21377,24759,27137,28809,29984,30811,31392,31801,32088,32290,32432,32532,32602,32768},
+ { 9474,16208,20995,24399,26819,28539,29762,30631,31249,31688,32000,32222,32380,32492,32572,32768},
+ { 9216,15840,20601,24023,26483,28251,29522,30435,31091,31563,31902,32146,32321,32447,32537,32768},
+ { 8959,15469,20199,23636,26133,27947,29265,30223,30919,31425,31792,32059,32253,32394,32496,32768},
+ { 8705,15097,19791,23238,25770,27629,28994,29997,30733,31274,31671,31963,32177,32334,32449,32768},
+ { 8449,14719,19373,22827,25390,27292,28704,29752,30530,31107,31535,31853,32089,32264,32394,32768},
+ { 8192,14336,18944,22400,24992,26936,28394,29488,30308,30923,31384,31730,31989,32184,32330,32768},
+ { 7936,13950,18507,21961,24578,26561,28064,29203,30066,30720,31216,31592,31877,32093,32256,32768},
+ { 7678,13558,18060,21507,24146,26166,27713,28897,29804,30498,31030,31437,31749,31988,32171,32768},
+ { 7423,13165,17606,21041,23698,25753,27342,28571,29522,30257,30826,31266,31606,31869,32073,32768},
+ { 7168,12768,17143,20561,23231,25317,26947,28220,29215,29992,30599,31073,31444,31734,31960,32768},
+ { 6911,12365,16669,20065,22744,24858,26526,27842,28881,29701,30348,30858,31261,31579,31830,32768},
+ { 6657,11961,16188,19556,22240,24379,26083,27441,28523,29385,30072,30620,31056,31404,31681,32768},
+ { 6400,11550,15694,19029,21712,23871,25609,27007,28132,29037,29766,30352,30824,31204,31509,32768},
+ { 6142,11134,15190,18486,21164,23340,25108,26544,27711,28659,29429,30055,30564,30977,31313,32768},
+ { 5890,10720,14682,17932,20598,22785,24579,26051,27258,28248,29060,29726,30273,30721,31089,32768},
+ { 5631,10295,14157,17356,20005,22199,24016,25520,26766,27798,28652,29359,29945,30430,30832,32768},
+ { 5377, 9871,13628,16768,19393,21587,23421,24954,26236,27308,28204,28953,29579,30102,30539,32768},
+ { 5121, 9441,13086,16161,18756,20945,22792,24351,25666,26776,27712,28502,29169,29731,30206,32768},
+ { 4865, 9007,12534,15538,18096,20274,22129,23708,25053,26198,27173,28004,28711,29313,29826,32768},
+ { 4608, 8568,11971,14896,17409,19569,21425,23020,24391,25569,26581,27451,28199,28842,29394,32768},
+ { 4351, 8125,11398,14236,16697,18831,20682,22287,23679,24886,25933,26841,27628,28311,28903,32768},
+ { 4096, 7680,10816,13560,15961,18062,19900,21508,22915,24146,25224,26167,26992,27714,28346,32768},
+ { 3840, 7230,10223,12865,15197,17256,19074,20679,22096,23347,24451,25426,26287,27047,27718,32768},
+ { 3584, 6776, 9619,12151,14406,16414,18203,19796,21215,22479,23604,24606,25499,26294,27002,32768},
+ { 3328, 6318, 9004,11417,13585,15533,17283,18856,20269,21538,22678,23703,24624,25451,26194,32768},
+ { 3072, 5856, 8379,10665,12737,14615,16317,17859,19257,20524,21672,22712,23655,24509,25283,32768},
+ { 2816, 5390, 7743, 9894,11860,13657,15299,16800,18172,19426,20573,21621,22579,23455,24255,32768},
+ { 2560, 4920, 7096, 9102,10951,12656,14227,15676,17011,18242,19377,20423,21388,22277,23097,32768},
+ { 2304, 4446, 6437, 8288,10009,11609,13097,14480,15766,16961,18072,19105,20066,20959,21789,32768},
+ { 2048, 3968, 5768, 7456, 9038,10521,11911,13215,14437,15583,16657,17664,18608,19493,20323,32768},
+ { 1792, 3486, 5087, 6601, 8032, 9385,10664,11873,13016,14096,15117,16082,16995,17858,18673,32768},
+ { 1536, 3000, 4395, 5725, 6993, 8201, 9353,10451,11497,12494,13444,14350,15213,16036,16820,32768},
+ { 1280, 2510, 3692, 4828, 5919, 6968, 7976, 8944, 9875,10769,11628,12454,13248,14011,14744,32768},
+ { 1024, 2016, 2977, 3908, 4810, 5684, 6530, 7350, 8144, 8913, 9658,10380,11080,11758,12415,32768},
+ { 768, 1518, 2250, 2965, 3663, 4345, 5011, 5662, 6297, 6917, 7523, 8115, 8693, 9257, 9808,32768},
+ { 512, 1016, 1512, 2000, 2481, 2954, 3420, 3879, 4330, 4774, 5211, 5642, 6066, 6483, 6894,32768},
+ { 256, 510, 762, 1012, 1260, 1506, 1750, 1992, 2232, 2471, 2708, 2943, 3176, 3407, 3636,32768},
+};
+
+const ent_rng daalaent_laplace_offset[] = { /* offsets for Laplace-distributed symbol decoding; monotonically decreasing after entry 0 — presumably indexed by decay, in the coder's 15-bit (32768) scale, TODO confirm */
+ 0, 29871, 28672, 27751, 26975, 26291, 25673, 25105,
+ 24576, 24079, 23609, 23162, 22734, 22325, 21931, 21550,
+ 21182, 20826, 20480, 20143, 19815, 19495, 19183, 18877,
+ 18579, 18286, 17999, 17718, 17442, 17170, 16904, 16642,
+ 16384, 16129, 15879, 15633, 15390, 15150, 14913, 14680,
+ 14450, 14222, 13997, 13775, 13556, 13338, 13124, 12911,
+ 12701, 12493, 12288, 12084, 11882, 11682, 11484, 11288,
+ 11094, 10901, 10710, 10521, 10333, 10147, 9962, 9779,
+ 9597, 9417, 9238, 9060, 8884, 8709, 8535, 8363,
+ 8192, 8021, 7853, 7685, 7518, 7352, 7188, 7025,
+ 6862, 6701, 6540, 6381, 6222, 6065, 5908, 5753,
+ 5598, 5444, 5291, 5138, 4987, 4837, 4687, 4538,
+ 4390, 4242, 4096, 3950, 3804, 3660, 3516, 3373,
+ 3231, 3089, 2948, 2808, 2668, 2529, 2391, 2253,
+ 2116, 1979, 1843, 1708, 1573, 1439, 1306, 1172,
+ 1040, 908, 777, 646, 516, 386, 257, 128,
+};
+
+static const uint8_t daala_zigzag4x4[][2] = { /* 4x4 coefficient scan order: 15 {x,y} positions, DC (0,0) excluded */
+ { 0, 1 }, { 1, 0 }, { 1, 1 }, { 0, 2 },
+ { 2, 0 }, { 1, 2 }, { 0, 3 }, { 2, 1 },
+ { 3, 0 }, { 1, 3 }, { 3, 1 }, { 2, 2 },
+ { 2, 3 }, { 3, 2 }, { 3, 3 }
+};
+
+static const uint8_t daala_zigzag8x8[][2] = { /* 8x8 scan: the 48 {x,y} positions outside the top-left 4x4 sub-block (which uses the 4x4 scan) */
+ { 4, 0 }, { 5, 0 }, { 4, 1 }, { 6, 0 },
+ { 5, 1 }, { 7, 0 }, { 6, 1 }, { 7, 1 },
+ { 0, 4 }, { 0, 5 }, { 1, 4 }, { 0, 6 },
+ { 1, 5 }, { 0, 7 }, { 1, 6 }, { 1, 7 },
+ { 2, 4 }, { 4, 2 }, { 2, 5 }, { 3, 4 },
+ { 4, 3 }, { 5, 2 }, { 3, 5 }, { 2, 6 },
+ { 4, 4 }, { 5, 3 }, { 2, 7 }, { 6, 2 },
+ { 3, 6 }, { 4, 5 }, { 7, 2 }, { 5, 4 },
+ { 6, 3 }, { 3, 7 }, { 5, 5 }, { 7, 3 },
+ { 4, 6 }, { 6, 4 }, { 4, 7 }, { 5, 6 },
+ { 7, 4 }, { 6, 5 }, { 5, 7 }, { 7, 5 },
+ { 6, 6 }, { 6, 7 }, { 7, 6 }, { 7, 7 }
+};
+
+static const uint8_t daala_zigzag16x16[][2] = {
+ { 8, 0 }, { 9, 0 }, { 8, 1 }, { 10, 0 },
+ { 9, 1 }, { 11, 0 }, { 8, 2 }, { 10, 1 },
+ { 12, 0 }, { 8, 3 }, { 9, 2 }, { 13, 0 },
+ { 11, 1 }, { 9, 3 }, { 10, 2 }, { 14, 0 },
+ { 15, 0 }, { 12, 1 }, { 10, 3 }, { 11, 2 },
+ { 13, 1 }, { 11, 3 }, { 12, 2 }, { 14, 1 },
+ { 15, 1 }, { 13, 2 }, { 12, 3 }, { 14, 2 },
+ { 13, 3 }, { 15, 2 }, { 14, 3 }, { 15, 3 },
+ { 0, 8 }, { 0, 9 }, { 1, 8 }, { 0, 10 },
+ { 1, 9 }, { 2, 8 }, { 0, 11 }, { 1, 10 },
+ { 0, 12 }, { 3, 8 }, { 2, 9 }, { 0, 13 },
+ { 1, 11 }, { 2, 10 }, { 3, 9 }, { 0, 14 },
+ { 1, 12 }, { 0, 15 }, { 2, 11 }, { 3, 10 },
+ { 1, 13 }, { 2, 12 }, { 1, 14 }, { 3, 11 },
+ { 1, 15 }, { 2, 13 }, { 3, 12 }, { 2, 14 },
+ { 2, 15 }, { 3, 13 }, { 3, 14 }, { 3, 15 },
+ { 4, 8 }, { 8, 4 }, { 5, 8 }, { 4, 9 },
+ { 8, 5 }, { 9, 4 }, { 5, 9 }, { 6, 8 },
+ { 4, 10 }, { 8, 6 }, { 9, 5 }, { 5, 10 },
+ { 6, 9 }, { 10, 4 }, { 7, 8 }, { 4, 11 },
+ { 8, 7 }, { 9, 6 }, { 10, 5 }, { 5, 11 },
+ { 7, 9 }, { 6, 10 }, { 4, 12 }, { 8, 8 },
+ { 11, 4 }, { 9, 7 }, { 10, 6 }, { 4, 13 },
+ { 6, 11 }, { 7, 10 }, { 11, 5 }, { 5, 12 },
+ { 8, 9 }, { 9, 8 }, { 12, 4 }, { 10, 7 },
+ { 4, 14 }, { 5, 13 }, { 11, 6 }, { 4, 15 },
+ { 6, 12 }, { 7, 11 }, { 8, 10 }, { 12, 5 },
+ { 9, 9 }, { 13, 4 }, { 10, 8 }, { 5, 14 },
+ { 11, 7 }, { 6, 13 }, { 5, 15 }, { 7, 12 },
+ { 14, 4 }, { 13, 5 }, { 12, 6 }, { 8, 11 },
+ { 9, 10 }, { 15, 4 }, { 10, 9 }, { 6, 14 },
+ { 11, 8 }, { 7, 13 }, { 14, 5 }, { 6, 15 },
+ { 12, 7 }, { 13, 6 }, { 8, 12 }, { 9, 11 },
+ { 15, 5 }, { 10, 10 }, { 7, 14 }, { 11, 9 },
+ { 13, 7 }, { 14, 6 }, { 7, 15 }, { 12, 8 },
+ { 8, 13 }, { 9, 12 }, { 15, 6 }, { 10, 11 },
+ { 8, 14 }, { 11, 10 }, { 14, 7 }, { 13, 8 },
+ { 12, 9 }, { 8, 15 }, { 9, 13 }, { 15, 7 },
+ { 10, 12 }, { 11, 11 }, { 14, 8 }, { 13, 9 },
+ { 9, 14 }, { 12, 10 }, { 15, 8 }, { 9, 15 },
+ { 10, 13 }, { 11, 12 }, { 12, 11 }, { 14, 9 },
+ { 13, 10 }, { 10, 14 }, { 15, 9 }, { 10, 15 },
+ { 11, 13 }, { 14, 10 }, { 12, 12 }, { 13, 11 },
+ { 15, 10 }, { 11, 14 }, { 11, 15 }, { 12, 13 },
+ { 14, 11 }, { 13, 12 }, { 15, 11 }, { 12, 14 },
+ { 13, 13 }, { 14, 12 }, { 12, 15 }, { 15, 12 },
+ { 13, 14 }, { 14, 13 }, { 13, 15 }, { 15, 13 },
+ { 14, 14 }, { 14, 15 }, { 15, 14 }, { 15, 15 }
+};
+
+static const uint8_t daala_zigzag32x32[][2] = {
+ { 16, 0 }, { 17, 0 }, { 18, 0 }, { 19, 0 },
+ { 16, 1 }, { 17, 1 }, { 20, 0 }, { 16, 2 },
+ { 18, 1 }, { 21, 0 }, { 17, 2 }, { 16, 3 },
+ { 19, 1 }, { 22, 0 }, { 18, 2 }, { 17, 3 },
+ { 20, 1 }, { 16, 4 }, { 23, 0 }, { 19, 2 },
+ { 24, 0 }, { 16, 5 }, { 21, 1 }, { 17, 4 },
+ { 18, 3 }, { 20, 2 }, { 17, 5 }, { 16, 6 },
+ { 19, 3 }, { 18, 4 }, { 25, 0 }, { 22, 1 },
+ { 16, 7 }, { 21, 2 }, { 17, 6 }, { 20, 3 },
+ { 26, 0 }, { 18, 5 }, { 19, 4 }, { 17, 7 },
+ { 23, 1 }, { 22, 2 }, { 18, 6 }, { 27, 0 },
+ { 19, 5 }, { 24, 1 }, { 21, 3 }, { 28, 0 },
+ { 20, 4 }, { 18, 7 }, { 19, 6 }, { 23, 2 },
+ { 29, 0 }, { 25, 1 }, { 21, 4 }, { 30, 0 },
+ { 20, 5 }, { 22, 3 }, { 31, 0 }, { 19, 7 },
+ { 24, 2 }, { 26, 1 }, { 20, 6 }, { 21, 5 },
+ { 22, 4 }, { 23, 3 }, { 27, 1 }, { 25, 2 },
+ { 20, 7 }, { 28, 1 }, { 24, 3 }, { 21, 6 },
+ { 22, 5 }, { 23, 4 }, { 26, 2 }, { 21, 7 },
+ { 29, 1 }, { 25, 3 }, { 30, 1 }, { 27, 2 },
+ { 22, 6 }, { 23, 5 }, { 31, 1 }, { 24, 4 },
+ { 26, 3 }, { 28, 2 }, { 22, 7 }, { 23, 6 },
+ { 25, 4 }, { 24, 5 }, { 29, 2 }, { 30, 2 },
+ { 27, 3 }, { 23, 7 }, { 31, 2 }, { 24, 6 },
+ { 26, 4 }, { 25, 5 }, { 28, 3 }, { 24, 7 },
+ { 27, 4 }, { 29, 3 }, { 25, 6 }, { 26, 5 },
+ { 30, 3 }, { 31, 3 }, { 28, 4 }, { 27, 5 },
+ { 25, 7 }, { 29, 4 }, { 26, 6 }, { 28, 5 },
+ { 30, 4 }, { 26, 7 }, { 27, 6 }, { 31, 4 },
+ { 29, 5 }, { 27, 7 }, { 30, 5 }, { 28, 6 },
+ { 31, 5 }, { 29, 6 }, { 28, 7 }, { 30, 6 },
+ { 31, 6 }, { 29, 7 }, { 30, 7 }, { 31, 7 },
+ { 0, 16 }, { 0, 17 }, { 1, 16 }, { 0, 18 },
+ { 1, 17 }, { 0, 19 }, { 2, 16 }, { 1, 18 },
+ { 0, 20 }, { 2, 17 }, { 3, 16 }, { 1, 19 },
+ { 2, 18 }, { 0, 21 }, { 3, 17 }, { 4, 16 },
+ { 1, 20 }, { 2, 19 }, { 0, 22 }, { 3, 18 },
+ { 4, 17 }, { 5, 16 }, { 0, 23 }, { 3, 19 },
+ { 2, 20 }, { 1, 21 }, { 4, 18 }, { 6, 16 },
+ { 5, 17 }, { 3, 20 }, { 2, 21 }, { 1, 22 },
+ { 0, 24 }, { 0, 25 }, { 4, 19 }, { 7, 16 },
+ { 6, 17 }, { 5, 18 }, { 0, 26 }, { 3, 21 },
+ { 2, 22 }, { 1, 23 }, { 4, 20 }, { 5, 19 },
+ { 6, 18 }, { 1, 24 }, { 7, 17 }, { 0, 27 },
+ { 2, 23 }, { 3, 22 }, { 4, 21 }, { 1, 25 },
+ { 5, 20 }, { 7, 18 }, { 0, 28 }, { 6, 19 },
+ { 2, 24 }, { 1, 26 }, { 0, 29 }, { 4, 22 },
+ { 3, 23 }, { 2, 25 }, { 5, 21 }, { 0, 31 },
+ { 7, 19 }, { 6, 20 }, { 0, 30 }, { 1, 27 },
+ { 3, 24 }, { 2, 26 }, { 4, 23 }, { 5, 22 },
+ { 7, 20 }, { 1, 28 }, { 6, 21 }, { 3, 25 },
+ { 2, 27 }, { 1, 29 }, { 4, 24 }, { 2, 28 },
+ { 1, 30 }, { 7, 21 }, { 5, 23 }, { 3, 26 },
+ { 6, 22 }, { 1, 31 }, { 4, 25 }, { 7, 22 },
+ { 3, 27 }, { 2, 29 }, { 2, 30 }, { 5, 24 },
+ { 2, 31 }, { 6, 23 }, { 4, 26 }, { 3, 28 },
+ { 5, 25 }, { 3, 29 }, { 6, 24 }, { 7, 23 },
+ { 3, 30 }, { 4, 27 }, { 3, 31 }, { 5, 26 },
+ { 6, 25 }, { 4, 28 }, { 7, 24 }, { 4, 29 },
+ { 5, 27 }, { 4, 30 }, { 4, 31 }, { 6, 26 },
+ { 5, 28 }, { 7, 25 }, { 6, 27 }, { 5, 29 },
+ { 7, 26 }, { 5, 30 }, { 5, 31 }, { 6, 28 },
+ { 7, 27 }, { 6, 29 }, { 6, 30 }, { 7, 28 },
+ { 6, 31 }, { 7, 29 }, { 7, 30 }, { 7, 31 },
+ { 8, 16 }, { 9, 16 }, { 8, 17 }, { 10, 16 },
+ { 9, 17 }, { 16, 8 }, { 8, 18 }, { 16, 9 },
+ { 10, 17 }, { 11, 16 }, { 17, 8 }, { 9, 18 },
+ { 8, 19 }, { 16, 10 }, { 11, 17 }, { 12, 16 },
+ { 10, 18 }, { 17, 9 }, { 9, 19 }, { 16, 11 },
+ { 8, 20 }, { 18, 8 }, { 17, 10 }, { 10, 19 },
+ { 12, 17 }, { 11, 18 }, { 9, 20 }, { 16, 12 },
+ { 18, 9 }, { 8, 21 }, { 13, 16 }, { 17, 11 },
+ { 19, 8 }, { 18, 10 }, { 13, 17 }, { 16, 13 },
+ { 11, 19 }, { 12, 18 }, { 10, 20 }, { 17, 12 },
+ { 9, 21 }, { 19, 9 }, { 8, 22 }, { 14, 16 },
+ { 18, 11 }, { 11, 20 }, { 10, 21 }, { 20, 8 },
+ { 13, 18 }, { 16, 14 }, { 12, 19 }, { 17, 13 },
+ { 19, 10 }, { 14, 17 }, { 9, 22 }, { 18, 12 },
+ { 8, 23 }, { 17, 14 }, { 20, 9 }, { 15, 16 },
+ { 16, 15 }, { 13, 19 }, { 10, 22 }, { 19, 11 },
+ { 11, 21 }, { 14, 18 }, { 12, 20 }, { 18, 13 },
+ { 20, 10 }, { 21, 8 }, { 15, 17 }, { 9, 23 },
+ { 19, 12 }, { 11, 22 }, { 8, 24 }, { 21, 9 },
+ { 17, 15 }, { 16, 16 }, { 14, 19 }, { 18, 14 },
+ { 12, 21 }, { 13, 20 }, { 20, 11 }, { 10, 23 },
+ { 19, 13 }, { 15, 18 }, { 16, 17 }, { 21, 10 },
+ { 22, 8 }, { 9, 24 }, { 8, 25 }, { 20, 12 },
+ { 15, 19 }, { 11, 23 }, { 17, 16 }, { 18, 15 },
+ { 14, 20 }, { 12, 22 }, { 10, 24 }, { 22, 9 },
+ { 21, 11 }, { 19, 14 }, { 13, 21 }, { 16, 18 },
+ { 9, 25 }, { 17, 17 }, { 8, 26 }, { 20, 13 },
+ { 23, 8 }, { 12, 23 }, { 13, 22 }, { 22, 10 },
+ { 19, 15 }, { 15, 20 }, { 16, 19 }, { 21, 12 },
+ { 11, 24 }, { 14, 21 }, { 8, 27 }, { 18, 16 },
+ { 10, 25 }, { 9, 26 }, { 22, 11 }, { 20, 14 },
+ { 23, 9 }, { 18, 17 }, { 17, 18 }, { 17, 19 },
+ { 19, 16 }, { 21, 13 }, { 10, 26 }, { 12, 24 },
+ { 23, 10 }, { 24, 8 }, { 8, 28 }, { 16, 20 },
+ { 9, 27 }, { 15, 21 }, { 22, 12 }, { 14, 22 },
+ { 13, 23 }, { 20, 15 }, { 11, 25 }, { 24, 9 },
+ { 18, 18 }, { 19, 17 }, { 23, 11 }, { 10, 27 },
+ { 8, 29 }, { 12, 25 }, { 9, 28 }, { 8, 30 },
+ { 21, 14 }, { 13, 24 }, { 11, 26 }, { 25, 8 },
+ { 24, 10 }, { 20, 16 }, { 19, 18 }, { 14, 23 },
+ { 22, 13 }, { 8, 31 }, { 17, 20 }, { 9, 29 },
+ { 23, 12 }, { 15, 22 }, { 25, 9 }, { 11, 27 },
+ { 10, 28 }, { 20, 17 }, { 21, 15 }, { 18, 19 },
+ { 16, 21 }, { 24, 11 }, { 9, 30 }, { 12, 26 },
+ { 10, 29 }, { 22, 14 }, { 14, 24 }, { 9, 31 },
+ { 26, 8 }, { 13, 25 }, { 25, 10 }, { 18, 20 },
+ { 19, 19 }, { 11, 28 }, { 15, 23 }, { 20, 18 },
+ { 10, 30 }, { 12, 27 }, { 17, 21 }, { 23, 13 },
+ { 24, 12 }, { 21, 16 }, { 16, 22 }, { 26, 9 },
+ { 27, 8 }, { 13, 26 }, { 22, 15 }, { 10, 31 },
+ { 14, 25 }, { 12, 28 }, { 25, 11 }, { 21, 17 },
+ { 26, 10 }, { 20, 19 }, { 11, 29 }, { 15, 24 },
+ { 23, 14 }, { 27, 9 }, { 11, 30 }, { 13, 27 },
+ { 19, 20 }, { 24, 13 }, { 28, 8 }, { 11, 31 },
+ { 22, 16 }, { 17, 22 }, { 16, 23 }, { 25, 12 },
+ { 18, 21 }, { 12, 29 }, { 21, 18 }, { 28, 9 },
+ { 27, 10 }, { 26, 11 }, { 29, 8 }, { 14, 26 },
+ { 15, 25 }, { 13, 28 }, { 12, 30 }, { 23, 15 },
+ { 30, 8 }, { 16, 24 }, { 13, 29 }, { 25, 13 },
+ { 24, 14 }, { 20, 20 }, { 31, 8 }, { 12, 31 },
+ { 14, 27 }, { 28, 10 }, { 26, 12 }, { 22, 17 },
+ { 21, 19 }, { 17, 23 }, { 18, 22 }, { 29, 9 },
+ { 27, 11 }, { 19, 21 }, { 27, 12 }, { 30, 9 },
+ { 31, 9 }, { 13, 30 }, { 24, 15 }, { 23, 16 },
+ { 15, 26 }, { 14, 28 }, { 29, 10 }, { 28, 11 },
+ { 26, 13 }, { 17, 24 }, { 13, 31 }, { 25, 14 },
+ { 22, 18 }, { 16, 25 }, { 30, 10 }, { 14, 29 },
+ { 15, 27 }, { 19, 22 }, { 21, 20 }, { 20, 21 },
+ { 27, 13 }, { 29, 11 }, { 18, 23 }, { 23, 17 },
+ { 16, 26 }, { 31, 10 }, { 24, 16 }, { 14, 30 },
+ { 22, 19 }, { 14, 31 }, { 28, 12 }, { 26, 14 },
+ { 30, 11 }, { 15, 28 }, { 25, 15 }, { 17, 25 },
+ { 23, 18 }, { 18, 24 }, { 15, 30 }, { 29, 12 },
+ { 31, 11 }, { 16, 27 }, { 24, 17 }, { 28, 13 },
+ { 19, 23 }, { 15, 29 }, { 25, 16 }, { 17, 26 },
+ { 27, 14 }, { 22, 20 }, { 15, 31 }, { 20, 22 },
+ { 21, 21 }, { 16, 28 }, { 17, 27 }, { 30, 12 },
+ { 26, 15 }, { 19, 24 }, { 18, 25 }, { 23, 19 },
+ { 29, 13 }, { 31, 12 }, { 24, 18 }, { 26, 16 },
+ { 25, 17 }, { 16, 29 }, { 28, 14 }, { 20, 23 },
+ { 18, 26 }, { 21, 22 }, { 19, 25 }, { 22, 21 },
+ { 27, 15 }, { 17, 28 }, { 16, 30 }, { 26, 17 },
+ { 23, 20 }, { 16, 31 }, { 25, 18 }, { 27, 16 },
+ { 20, 24 }, { 24, 19 }, { 31, 13 }, { 30, 13 },
+ { 29, 14 }, { 18, 27 }, { 28, 15 }, { 17, 29 },
+ { 19, 26 }, { 17, 30 }, { 21, 23 }, { 22, 22 },
+ { 30, 14 }, { 20, 25 }, { 23, 21 }, { 17, 31 },
+ { 18, 28 }, { 25, 19 }, { 24, 20 }, { 28, 16 },
+ { 31, 14 }, { 26, 18 }, { 19, 27 }, { 29, 15 },
+ { 27, 17 }, { 30, 15 }, { 21, 24 }, { 22, 23 },
+ { 26, 19 }, { 23, 22 }, { 28, 17 }, { 29, 16 },
+ { 18, 30 }, { 24, 21 }, { 25, 20 }, { 18, 31 },
+ { 18, 29 }, { 20, 26 }, { 19, 28 }, { 27, 18 },
+ { 31, 15 }, { 20, 27 }, { 30, 16 }, { 19, 29 },
+ { 29, 17 }, { 31, 16 }, { 27, 19 }, { 21, 25 },
+ { 28, 18 }, { 26, 20 }, { 22, 24 }, { 25, 21 },
+ { 19, 30 }, { 24, 22 }, { 30, 17 }, { 21, 26 },
+ { 23, 23 }, { 19, 31 }, { 20, 28 }, { 31, 17 },
+ { 28, 19 }, { 27, 20 }, { 21, 27 }, { 29, 18 },
+ { 30, 18 }, { 25, 22 }, { 26, 21 }, { 20, 29 },
+ { 22, 25 }, { 24, 23 }, { 29, 19 }, { 23, 24 },
+ { 20, 31 }, { 20, 30 }, { 28, 20 }, { 21, 28 },
+ { 22, 26 }, { 31, 18 }, { 27, 21 }, { 30, 19 },
+ { 22, 27 }, { 29, 20 }, { 23, 25 }, { 24, 24 },
+ { 26, 22 }, { 21, 29 }, { 25, 23 }, { 31, 19 },
+ { 21, 30 }, { 23, 26 }, { 28, 21 }, { 21, 31 },
+ { 22, 28 }, { 30, 20 }, { 25, 24 }, { 27, 22 },
+ { 29, 21 }, { 26, 23 }, { 24, 25 }, { 31, 20 },
+ { 23, 27 }, { 22, 29 }, { 30, 21 }, { 28, 22 },
+ { 24, 26 }, { 25, 25 }, { 27, 23 }, { 22, 30 },
+ { 23, 28 }, { 22, 31 }, { 26, 24 }, { 31, 21 },
+ { 24, 27 }, { 29, 22 }, { 27, 24 }, { 30, 22 },
+ { 25, 26 }, { 28, 23 }, { 23, 30 }, { 23, 29 },
+ { 24, 28 }, { 25, 27 }, { 31, 22 }, { 23, 31 },
+ { 26, 25 }, { 28, 24 }, { 29, 23 }, { 24, 29 },
+ { 24, 30 }, { 27, 25 }, { 25, 28 }, { 26, 26 },
+ { 30, 23 }, { 26, 27 }, { 31, 23 }, { 28, 25 },
+ { 27, 26 }, { 25, 29 }, { 24, 31 }, { 29, 24 },
+ { 30, 24 }, { 27, 27 }, { 29, 25 }, { 26, 28 },
+ { 31, 24 }, { 25, 30 }, { 25, 31 }, { 28, 26 },
+ { 27, 28 }, { 26, 29 }, { 30, 25 }, { 29, 26 },
+ { 28, 27 }, { 26, 30 }, { 31, 25 }, { 27, 29 },
+ { 26, 31 }, { 30, 26 }, { 28, 28 }, { 31, 26 },
+ { 29, 27 }, { 27, 30 }, { 28, 29 }, { 27, 31 },
+ { 30, 27 }, { 31, 27 }, { 28, 30 }, { 29, 28 },
+ { 30, 28 }, { 29, 29 }, { 30, 29 }, { 31, 28 },
+ { 28, 31 }, { 29, 30 }, { 29, 31 }, { 31, 29 },
+ { 30, 30 }, { 30, 31 }, { 31, 30 }, { 31, 31 }
+};
+
+static const uint8_t daala_zigzag64x64[][2] = {
+ { 32, 0 }, { 33, 0 }, { 34, 0 }, { 35, 0 },
+ { 32, 1 }, { 36, 0 }, { 33, 1 }, { 37, 0 },
+ { 34, 1 }, { 32, 2 }, { 38, 0 }, { 35, 1 },
+ { 33, 2 }, { 39, 0 }, { 36, 1 }, { 34, 2 },
+ { 40, 0 }, { 32, 3 }, { 37, 1 }, { 35, 2 },
+ { 41, 0 }, { 33, 3 }, { 38, 1 }, { 36, 2 },
+ { 42, 0 }, { 32, 4 }, { 34, 3 }, { 39, 1 },
+ { 37, 2 }, { 43, 0 }, { 35, 3 }, { 33, 4 },
+ { 40, 1 }, { 44, 0 }, { 38, 2 }, { 32, 5 },
+ { 36, 3 }, { 41, 1 }, { 34, 4 }, { 45, 0 },
+ { 39, 2 }, { 33, 5 }, { 42, 1 }, { 37, 3 },
+ { 35, 4 }, { 46, 0 }, { 40, 2 }, { 32, 6 },
+ { 43, 1 }, { 34, 5 }, { 38, 3 }, { 36, 4 },
+ { 47, 0 }, { 41, 2 }, { 33, 6 }, { 44, 1 },
+ { 39, 3 }, { 35, 5 }, { 37, 4 }, { 48, 0 },
+ { 32, 7 }, { 42, 2 }, { 34, 6 }, { 45, 1 },
+ { 40, 3 }, { 36, 5 }, { 38, 4 }, { 49, 0 },
+ { 33, 7 }, { 43, 2 }, { 46, 1 }, { 35, 6 },
+ { 41, 3 }, { 37, 5 }, { 39, 4 }, { 32, 8 },
+ { 50, 0 }, { 44, 2 }, { 34, 7 }, { 47, 1 },
+ { 36, 6 }, { 42, 3 }, { 38, 5 }, { 51, 0 },
+ { 40, 4 }, { 33, 8 }, { 45, 2 }, { 35, 7 },
+ { 48, 1 }, { 32, 9 }, { 37, 6 }, { 43, 3 },
+ { 52, 0 }, { 39, 5 }, { 41, 4 }, { 34, 8 },
+ { 46, 2 }, { 49, 1 }, { 36, 7 }, { 33, 9 },
+ { 44, 3 }, { 53, 0 }, { 38, 6 }, { 42, 4 },
+ { 40, 5 }, { 35, 8 }, { 32, 10 }, { 47, 2 },
+ { 50, 1 }, { 37, 7 }, { 54, 0 }, { 45, 3 },
+ { 34, 9 }, { 39, 6 }, { 43, 4 }, { 41, 5 },
+ { 48, 2 }, { 36, 8 }, { 51, 1 }, { 33, 10 },
+ { 55, 0 }, { 38, 7 }, { 46, 3 }, { 32, 11 },
+ { 40, 6 }, { 35, 9 }, { 44, 4 }, { 42, 5 },
+ { 49, 2 }, { 52, 1 }, { 37, 8 }, { 34, 10 },
+ { 56, 0 }, { 39, 7 }, { 47, 3 }, { 41, 6 },
+ { 33, 11 }, { 45, 4 }, { 36, 9 }, { 43, 5 },
+ { 50, 2 }, { 53, 1 }, { 32, 12 }, { 38, 8 },
+ { 57, 0 }, { 35, 10 }, { 48, 3 }, { 40, 7 },
+ { 42, 6 }, { 46, 4 }, { 44, 5 }, { 34, 11 },
+ { 37, 9 }, { 51, 2 }, { 54, 1 }, { 58, 0 },
+ { 39, 8 }, { 33, 12 }, { 36, 10 }, { 49, 3 },
+ { 41, 7 }, { 32, 13 }, { 47, 4 }, { 43, 6 },
+ { 45, 5 }, { 52, 2 }, { 55, 1 }, { 38, 9 },
+ { 35, 11 }, { 59, 0 }, { 40, 8 }, { 34, 12 },
+ { 50, 3 }, { 37, 10 }, { 42, 7 }, { 48, 4 },
+ { 44, 6 }, { 33, 13 }, { 56, 1 }, { 53, 2 },
+ { 46, 5 }, { 60, 0 }, { 39, 9 }, { 36, 11 },
+ { 32, 14 }, { 41, 8 }, { 51, 3 }, { 35, 12 },
+ { 38, 10 }, { 43, 7 }, { 49, 4 }, { 57, 1 },
+ { 54, 2 }, { 45, 6 }, { 47, 5 }, { 61, 0 },
+ { 34, 13 }, { 40, 9 }, { 37, 11 }, { 33, 14 },
+ { 52, 3 }, { 42, 8 }, { 36, 12 }, { 32, 15 },
+ { 39, 10 }, { 44, 7 }, { 58, 1 }, { 50, 4 },
+ { 55, 2 }, { 62, 0 }, { 46, 6 }, { 48, 5 },
+ { 35, 13 }, { 41, 9 }, { 38, 11 }, { 53, 3 },
+ { 34, 14 }, { 43, 8 }, { 59, 1 }, { 63, 0 },
+ { 56, 2 }, { 51, 4 }, { 37, 12 }, { 45, 7 },
+ { 40, 10 }, { 33, 15 }, { 47, 6 }, { 49, 5 },
+ { 36, 13 }, { 42, 9 }, { 39, 11 }, { 54, 3 },
+ { 44, 8 }, { 60, 1 }, { 35, 14 }, { 57, 2 },
+ { 52, 4 }, { 46, 7 }, { 41, 10 }, { 38, 12 },
+ { 50, 5 }, { 48, 6 }, { 34, 15 }, { 43, 9 },
+ { 37, 13 }, { 55, 3 }, { 40, 11 }, { 61, 1 },
+ { 45, 8 }, { 58, 2 }, { 53, 4 }, { 36, 14 },
+ { 47, 7 }, { 51, 5 }, { 42, 10 }, { 49, 6 },
+ { 39, 12 }, { 35, 15 }, { 56, 3 }, { 44, 9 },
+ { 38, 13 }, { 62, 1 }, { 41, 11 }, { 59, 2 },
+ { 46, 8 }, { 54, 4 }, { 48, 7 }, { 37, 14 },
+ { 52, 5 }, { 50, 6 }, { 43, 10 }, { 40, 12 },
+ { 36, 15 }, { 57, 3 }, { 45, 9 }, { 63, 1 },
+ { 39, 13 }, { 60, 2 }, { 42, 11 }, { 47, 8 },
+ { 55, 4 }, { 49, 7 }, { 53, 5 }, { 51, 6 },
+ { 38, 14 }, { 44, 10 }, { 41, 12 }, { 58, 3 },
+ { 37, 15 }, { 46, 9 }, { 61, 2 }, { 40, 13 },
+ { 56, 4 }, { 43, 11 }, { 48, 8 }, { 54, 5 },
+ { 50, 7 }, { 52, 6 }, { 39, 14 }, { 45, 10 },
+ { 59, 3 }, { 42, 12 }, { 62, 2 }, { 47, 9 },
+ { 38, 15 }, { 57, 4 }, { 41, 13 }, { 44, 11 },
+ { 49, 8 }, { 55, 5 }, { 51, 7 }, { 53, 6 },
+ { 60, 3 }, { 46, 10 }, { 40, 14 }, { 43, 12 },
+ { 63, 2 }, { 48, 9 }, { 58, 4 }, { 39, 15 },
+ { 50, 8 }, { 45, 11 }, { 42, 13 }, { 56, 5 },
+ { 52, 7 }, { 54, 6 }, { 61, 3 }, { 47, 10 },
+ { 41, 14 }, { 44, 12 }, { 49, 9 }, { 59, 4 },
+ { 40, 15 }, { 51, 8 }, { 57, 5 }, { 46, 11 },
+ { 43, 13 }, { 53, 7 }, { 55, 6 }, { 62, 3 },
+ { 48, 10 }, { 42, 14 }, { 45, 12 }, { 60, 4 },
+ { 50, 9 }, { 52, 8 }, { 58, 5 }, { 41, 15 },
+ { 47, 11 }, { 54, 7 }, { 56, 6 }, { 44, 13 },
+ { 63, 3 }, { 49, 10 }, { 43, 14 }, { 61, 4 },
+ { 46, 12 }, { 51, 9 }, { 59, 5 }, { 53, 8 },
+ { 42, 15 }, { 57, 6 }, { 55, 7 }, { 48, 11 },
+ { 45, 13 }, { 50, 10 }, { 62, 4 }, { 44, 14 },
+ { 47, 12 }, { 52, 9 }, { 60, 5 }, { 54, 8 },
+ { 58, 6 }, { 56, 7 }, { 49, 11 }, { 43, 15 },
+ { 46, 13 }, { 51, 10 }, { 63, 4 }, { 48, 12 },
+ { 45, 14 }, { 53, 9 }, { 61, 5 }, { 55, 8 },
+ { 59, 6 }, { 57, 7 }, { 50, 11 }, { 44, 15 },
+ { 47, 13 }, { 52, 10 }, { 54, 9 }, { 49, 12 },
+ { 62, 5 }, { 46, 14 }, { 56, 8 }, { 60, 6 },
+ { 58, 7 }, { 51, 11 }, { 45, 15 }, { 48, 13 },
+ { 53, 10 }, { 63, 5 }, { 55, 9 }, { 50, 12 },
+ { 47, 14 }, { 57, 8 }, { 61, 6 }, { 59, 7 },
+ { 52, 11 }, { 46, 15 }, { 49, 13 }, { 54, 10 },
+ { 56, 9 }, { 51, 12 }, { 62, 6 }, { 58, 8 },
+ { 48, 14 }, { 60, 7 }, { 53, 11 }, { 47, 15 },
+ { 50, 13 }, { 55, 10 }, { 57, 9 }, { 63, 6 },
+ { 52, 12 }, { 59, 8 }, { 61, 7 }, { 49, 14 },
+ { 54, 11 }, { 51, 13 }, { 48, 15 }, { 56, 10 },
+ { 58, 9 }, { 60, 8 }, { 53, 12 }, { 62, 7 },
+ { 50, 14 }, { 55, 11 }, { 52, 13 }, { 49, 15 },
+ { 57, 10 }, { 59, 9 }, { 61, 8 }, { 63, 7 },
+ { 54, 12 }, { 51, 14 }, { 56, 11 }, { 53, 13 },
+ { 58, 10 }, { 50, 15 }, { 60, 9 }, { 62, 8 },
+ { 55, 12 }, { 52, 14 }, { 57, 11 }, { 59, 10 },
+ { 54, 13 }, { 51, 15 }, { 61, 9 }, { 63, 8 },
+ { 56, 12 }, { 53, 14 }, { 58, 11 }, { 60, 10 },
+ { 55, 13 }, { 52, 15 }, { 62, 9 }, { 57, 12 },
+ { 54, 14 }, { 59, 11 }, { 61, 10 }, { 56, 13 },
+ { 63, 9 }, { 53, 15 }, { 58, 12 }, { 55, 14 },
+ { 60, 11 }, { 62, 10 }, { 57, 13 }, { 54, 15 },
+ { 59, 12 }, { 56, 14 }, { 61, 11 }, { 63, 10 },
+ { 58, 13 }, { 55, 15 }, { 60, 12 }, { 57, 14 },
+ { 62, 11 }, { 59, 13 }, { 56, 15 }, { 61, 12 },
+ { 63, 11 }, { 58, 14 }, { 60, 13 }, { 57, 15 },
+ { 62, 12 }, { 59, 14 }, { 61, 13 }, { 58, 15 },
+ { 63, 12 }, { 60, 14 }, { 62, 13 }, { 59, 15 },
+ { 61, 14 }, { 63, 13 }, { 60, 15 }, { 62, 14 },
+ { 61, 15 }, { 63, 14 }, { 62, 15 }, { 63, 15 },
+ { 0, 32 }, { 0, 33 }, { 0, 34 }, { 0, 35 },
+ { 1, 32 }, { 0, 36 }, { 1, 33 }, { 0, 37 },
+ { 1, 34 }, { 2, 32 }, { 0, 38 }, { 1, 35 },
+ { 2, 33 }, { 0, 39 }, { 1, 36 }, { 2, 34 },
+ { 0, 40 }, { 3, 32 }, { 1, 37 }, { 2, 35 },
+ { 0, 41 }, { 3, 33 }, { 1, 38 }, { 2, 36 },
+ { 0, 42 }, { 4, 32 }, { 3, 34 }, { 1, 39 },
+ { 2, 37 }, { 0, 43 }, { 3, 35 }, { 4, 33 },
+ { 1, 40 }, { 0, 44 }, { 2, 38 }, { 5, 32 },
+ { 3, 36 }, { 1, 41 }, { 4, 34 }, { 0, 45 },
+ { 2, 39 }, { 5, 33 }, { 1, 42 }, { 3, 37 },
+ { 4, 35 }, { 0, 46 }, { 2, 40 }, { 6, 32 },
+ { 1, 43 }, { 5, 34 }, { 3, 38 }, { 4, 36 },
+ { 0, 47 }, { 2, 41 }, { 6, 33 }, { 1, 44 },
+ { 3, 39 }, { 5, 35 }, { 4, 37 }, { 0, 48 },
+ { 7, 32 }, { 2, 42 }, { 6, 34 }, { 1, 45 },
+ { 3, 40 }, { 5, 36 }, { 4, 38 }, { 0, 49 },
+ { 7, 33 }, { 2, 43 }, { 1, 46 }, { 6, 35 },
+ { 3, 41 }, { 5, 37 }, { 4, 39 }, { 8, 32 },
+ { 0, 50 }, { 2, 44 }, { 7, 34 }, { 1, 47 },
+ { 6, 36 }, { 3, 42 }, { 5, 38 }, { 0, 51 },
+ { 4, 40 }, { 8, 33 }, { 2, 45 }, { 7, 35 },
+ { 1, 48 }, { 9, 32 }, { 6, 37 }, { 3, 43 },
+ { 0, 52 }, { 5, 39 }, { 4, 41 }, { 8, 34 },
+ { 2, 46 }, { 1, 49 }, { 7, 36 }, { 9, 33 },
+ { 3, 44 }, { 0, 53 }, { 6, 38 }, { 4, 42 },
+ { 5, 40 }, { 8, 35 }, { 10, 32 }, { 2, 47 },
+ { 1, 50 }, { 7, 37 }, { 0, 54 }, { 3, 45 },
+ { 9, 34 }, { 6, 39 }, { 4, 43 }, { 5, 41 },
+ { 2, 48 }, { 8, 36 }, { 1, 51 }, { 10, 33 },
+ { 0, 55 }, { 7, 38 }, { 3, 46 }, { 11, 32 },
+ { 6, 40 }, { 9, 35 }, { 4, 44 }, { 5, 42 },
+ { 2, 49 }, { 1, 52 }, { 8, 37 }, { 10, 34 },
+ { 0, 56 }, { 7, 39 }, { 3, 47 }, { 6, 41 },
+ { 11, 33 }, { 4, 45 }, { 9, 36 }, { 5, 43 },
+ { 2, 50 }, { 1, 53 }, { 12, 32 }, { 8, 38 },
+ { 0, 57 }, { 10, 35 }, { 3, 48 }, { 7, 40 },
+ { 6, 42 }, { 4, 46 }, { 5, 44 }, { 11, 34 },
+ { 9, 37 }, { 2, 51 }, { 1, 54 }, { 0, 58 },
+ { 8, 39 }, { 12, 33 }, { 10, 36 }, { 3, 49 },
+ { 7, 41 }, { 13, 32 }, { 4, 47 }, { 6, 43 },
+ { 5, 45 }, { 2, 52 }, { 1, 55 }, { 9, 38 },
+ { 11, 35 }, { 0, 59 }, { 8, 40 }, { 12, 34 },
+ { 3, 50 }, { 10, 37 }, { 7, 42 }, { 4, 48 },
+ { 6, 44 }, { 13, 33 }, { 1, 56 }, { 2, 53 },
+ { 5, 46 }, { 0, 60 }, { 9, 39 }, { 11, 36 },
+ { 14, 32 }, { 8, 41 }, { 3, 51 }, { 12, 35 },
+ { 10, 38 }, { 7, 43 }, { 4, 49 }, { 1, 57 },
+ { 2, 54 }, { 6, 45 }, { 5, 47 }, { 0, 61 },
+ { 13, 34 }, { 9, 40 }, { 11, 37 }, { 14, 33 },
+ { 3, 52 }, { 8, 42 }, { 12, 36 }, { 15, 32 },
+ { 10, 39 }, { 7, 44 }, { 1, 58 }, { 4, 50 },
+ { 2, 55 }, { 0, 62 }, { 6, 46 }, { 5, 48 },
+ { 13, 35 }, { 9, 41 }, { 11, 38 }, { 3, 53 },
+ { 14, 34 }, { 8, 43 }, { 1, 59 }, { 0, 63 },
+ { 2, 56 }, { 4, 51 }, { 12, 37 }, { 7, 45 },
+ { 10, 40 }, { 15, 33 }, { 6, 47 }, { 5, 49 },
+ { 13, 36 }, { 9, 42 }, { 11, 39 }, { 3, 54 },
+ { 8, 44 }, { 1, 60 }, { 14, 35 }, { 2, 57 },
+ { 4, 52 }, { 7, 46 }, { 10, 41 }, { 12, 38 },
+ { 5, 50 }, { 6, 48 }, { 15, 34 }, { 9, 43 },
+ { 13, 37 }, { 3, 55 }, { 11, 40 }, { 1, 61 },
+ { 8, 45 }, { 2, 58 }, { 4, 53 }, { 14, 36 },
+ { 7, 47 }, { 5, 51 }, { 10, 42 }, { 6, 49 },
+ { 12, 39 }, { 15, 35 }, { 3, 56 }, { 9, 44 },
+ { 13, 38 }, { 1, 62 }, { 11, 41 }, { 2, 59 },
+ { 8, 46 }, { 4, 54 }, { 7, 48 }, { 14, 37 },
+ { 5, 52 }, { 6, 50 }, { 10, 43 }, { 12, 40 },
+ { 15, 36 }, { 3, 57 }, { 9, 45 }, { 1, 63 },
+ { 13, 39 }, { 2, 60 }, { 11, 42 }, { 8, 47 },
+ { 4, 55 }, { 7, 49 }, { 5, 53 }, { 6, 51 },
+ { 14, 38 }, { 10, 44 }, { 12, 41 }, { 3, 58 },
+ { 15, 37 }, { 9, 46 }, { 2, 61 }, { 13, 40 },
+ { 4, 56 }, { 11, 43 }, { 8, 48 }, { 5, 54 },
+ { 7, 50 }, { 6, 52 }, { 14, 39 }, { 10, 45 },
+ { 3, 59 }, { 12, 42 }, { 2, 62 }, { 9, 47 },
+ { 15, 38 }, { 4, 57 }, { 13, 41 }, { 11, 44 },
+ { 8, 49 }, { 5, 55 }, { 7, 51 }, { 6, 53 },
+ { 3, 60 }, { 10, 46 }, { 14, 40 }, { 12, 43 },
+ { 2, 63 }, { 9, 48 }, { 4, 58 }, { 15, 39 },
+ { 8, 50 }, { 11, 45 }, { 13, 42 }, { 5, 56 },
+ { 7, 52 }, { 6, 54 }, { 3, 61 }, { 10, 47 },
+ { 14, 41 }, { 12, 44 }, { 9, 49 }, { 4, 59 },
+ { 15, 40 }, { 8, 51 }, { 5, 57 }, { 11, 46 },
+ { 13, 43 }, { 7, 53 }, { 6, 55 }, { 3, 62 },
+ { 10, 48 }, { 14, 42 }, { 12, 45 }, { 4, 60 },
+ { 9, 50 }, { 8, 52 }, { 5, 58 }, { 15, 41 },
+ { 11, 47 }, { 7, 54 }, { 6, 56 }, { 13, 44 },
+ { 3, 63 }, { 10, 49 }, { 14, 43 }, { 4, 61 },
+ { 12, 46 }, { 9, 51 }, { 5, 59 }, { 8, 53 },
+ { 15, 42 }, { 6, 57 }, { 7, 55 }, { 11, 48 },
+ { 13, 45 }, { 10, 50 }, { 4, 62 }, { 14, 44 },
+ { 12, 47 }, { 9, 52 }, { 5, 60 }, { 8, 54 },
+ { 6, 58 }, { 7, 56 }, { 11, 49 }, { 15, 43 },
+ { 13, 46 }, { 10, 51 }, { 4, 63 }, { 12, 48 },
+ { 14, 45 }, { 9, 53 }, { 5, 61 }, { 8, 55 },
+ { 6, 59 }, { 7, 57 }, { 11, 50 }, { 15, 44 },
+ { 13, 47 }, { 10, 52 }, { 9, 54 }, { 12, 49 },
+ { 5, 62 }, { 14, 46 }, { 8, 56 }, { 6, 60 },
+ { 7, 58 }, { 11, 51 }, { 15, 45 }, { 13, 48 },
+ { 10, 53 }, { 5, 63 }, { 9, 55 }, { 12, 50 },
+ { 14, 47 }, { 8, 57 }, { 6, 61 }, { 7, 59 },
+ { 11, 52 }, { 15, 46 }, { 13, 49 }, { 10, 54 },
+ { 9, 56 }, { 12, 51 }, { 6, 62 }, { 8, 58 },
+ { 14, 48 }, { 7, 60 }, { 11, 53 }, { 15, 47 },
+ { 12, 52 }, { 8, 59 }, { 7, 61 }, { 14, 49 },
+ { 13, 50 }, { 10, 55 }, { 9, 57 }, { 6, 63 },
+ { 11, 54 }, { 13, 51 }, { 15, 48 }, { 10, 56 },
+ { 9, 58 }, { 8, 60 }, { 12, 53 }, { 7, 62 },
+ { 14, 50 }, { 11, 55 }, { 13, 52 }, { 15, 49 },
+ { 10, 57 }, { 9, 59 }, { 8, 61 }, { 7, 63 },
+ { 12, 54 }, { 14, 51 }, { 11, 56 }, { 13, 53 },
+ { 10, 58 }, { 15, 50 }, { 9, 60 }, { 8, 62 },
+ { 12, 55 }, { 14, 52 }, { 11, 57 }, { 10, 59 },
+ { 13, 54 }, { 15, 51 }, { 9, 61 }, { 8 , 63 },
+ { 12, 56 }, { 14, 53 }, { 11, 58 }, { 10, 60 },
+ { 13, 55 }, { 15, 52 }, { 9, 62 }, { 12, 57 },
+ { 14, 54 }, { 11, 59 }, { 10, 61 }, { 13, 56 },
+ { 9, 63 }, { 15, 53 }, { 12, 58 }, { 14, 55 },
+ { 11, 60 }, { 10, 62 }, { 13, 57 }, { 15, 54 },
+ { 12, 59 }, { 14, 56 }, { 11, 61 }, { 10, 63 },
+ { 13, 58 }, { 15, 55 }, { 12, 60 }, { 14, 57 },
+ { 11, 62 }, { 13, 59 }, { 15, 56 }, { 12, 61 },
+ { 11, 63 }, { 14, 58 }, { 13, 60 }, { 15, 57 },
+ { 12, 62 }, { 14, 59 }, { 13, 61 }, { 15, 58 },
+ { 12, 63 }, { 14, 60 }, { 13, 62 }, { 15, 59 },
+ { 14, 61 }, { 13, 63 }, { 15, 60 }, { 14, 62 },
+ { 15, 61 }, { 14, 63 }, { 15, 62 }, { 15, 63 },
+ { 32, 16 }, { 16, 32 }, { 33, 16 }, { 16, 33 },
+ { 32, 17 }, { 17, 32 }, { 34, 16 }, { 16, 34 },
+ { 33, 17 }, { 17, 33 }, { 32, 18 }, { 18, 32 },
+ { 35, 16 }, { 16, 35 }, { 34, 17 }, { 17, 34 },
+ { 33, 18 }, { 18, 33 }, { 32, 19 }, { 19, 32 },
+ { 36, 16 }, { 16, 36 }, { 35, 17 }, { 17, 35 },
+ { 34, 18 }, { 18, 34 }, { 33, 19 }, { 19, 33 },
+ { 32, 20 }, { 20, 32 }, { 37, 16 }, { 16, 37 },
+ { 36, 17 }, { 17, 36 }, { 35, 18 }, { 18, 35 },
+ { 34, 19 }, { 19, 34 }, { 33, 20 }, { 20, 33 },
+ { 38, 16 }, { 16, 38 }, { 32, 21 }, { 21, 32 },
+ { 37, 17 }, { 17, 37 }, { 36, 18 }, { 18, 36 },
+ { 35, 19 }, { 19, 35 }, { 39, 16 }, { 16, 39 },
+ { 34, 20 }, { 20, 34 }, { 33, 21 }, { 21, 33 },
+ { 38, 17 }, { 17, 38 }, { 32, 22 }, { 22, 32 },
+ { 37, 18 }, { 18, 37 }, { 36, 19 }, { 19, 36 },
+ { 40, 16 }, { 16, 40 }, { 35, 20 }, { 20, 35 },
+ { 34, 21 }, { 21, 34 }, { 39, 17 }, { 17, 39 },
+ { 33, 22 }, { 22, 33 }, { 32, 23 }, { 23, 32 },
+ { 38, 18 }, { 18, 38 }, { 37, 19 }, { 19, 37 },
+ { 41, 16 }, { 16, 41 }, { 36, 20 }, { 20, 36 },
+ { 35, 21 }, { 21, 35 }, { 40, 17 }, { 17, 40 },
+ { 34, 22 }, { 22, 34 }, { 33, 23 }, { 23, 33 },
+ { 39, 18 }, { 18, 39 }, { 32, 24 }, { 24, 32 },
+ { 38, 19 }, { 19, 38 }, { 42, 16 }, { 16, 42 },
+ { 37, 20 }, { 20, 37 }, { 41, 17 }, { 17, 41 },
+ { 36, 21 }, { 21, 36 }, { 35, 22 }, { 22, 35 },
+ { 40, 18 }, { 18, 40 }, { 34, 23 }, { 23, 34 },
+ { 33, 24 }, { 24, 33 }, { 32, 25 }, { 25, 32 },
+ { 39, 19 }, { 19, 39 }, { 43, 16 }, { 16, 43 },
+ { 38, 20 }, { 20, 38 }, { 42, 17 }, { 17, 42 },
+ { 37, 21 }, { 21, 37 }, { 36, 22 }, { 22, 36 },
+ { 41, 18 }, { 18, 41 }, { 35, 23 }, { 23, 35 },
+ { 34, 24 }, { 24, 34 }, { 40, 19 }, { 19, 40 },
+ { 44, 16 }, { 16, 44 }, { 33, 25 }, { 25, 33 },
+ { 32, 26 }, { 26, 32 }, { 39, 20 }, { 20, 39 },
+ { 43, 17 }, { 17, 43 }, { 38, 21 }, { 21, 38 },
+ { 37, 22 }, { 22, 37 }, { 42, 18 }, { 18, 42 },
+ { 36, 23 }, { 23, 36 }, { 45, 16 }, { 16, 45 },
+ { 41, 19 }, { 19, 41 }, { 35, 24 }, { 24, 35 },
+ { 34, 25 }, { 25, 34 }, { 33, 26 }, { 26, 33 },
+ { 40, 20 }, { 20, 40 }, { 32, 27 }, { 27, 32 },
+ { 44, 17 }, { 17, 44 }, { 39, 21 }, { 21, 39 },
+ { 43, 18 }, { 18, 43 }, { 38, 22 }, { 22, 38 },
+ { 37, 23 }, { 23, 37 }, { 46, 16 }, { 16, 46 },
+ { 42, 19 }, { 19, 42 }, { 36, 24 }, { 24, 36 },
+ { 35, 25 }, { 25, 35 }, { 41, 20 }, { 20, 41 },
+ { 34, 26 }, { 26, 34 }, { 45, 17 }, { 17, 45 },
+ { 33, 27 }, { 27, 33 }, { 32, 28 }, { 28, 32 },
+ { 40, 21 }, { 21, 40 }, { 44, 18 }, { 18, 44 },
+ { 39, 22 }, { 22, 39 }, { 38, 23 }, { 23, 38 },
+ { 47, 16 }, { 16, 47 }, { 43, 19 }, { 19, 43 },
+ { 37, 24 }, { 24, 37 }, { 36, 25 }, { 25, 36 },
+ { 42, 20 }, { 20, 42 }, { 46, 17 }, { 17, 46 },
+ { 35, 26 }, { 26, 35 }, { 34, 27 }, { 27, 34 },
+ { 33, 28 }, { 28, 33 }, { 41, 21 }, { 21, 41 },
+ { 32, 29 }, { 29, 32 }, { 45, 18 }, { 18, 45 },
+ { 40, 22 }, { 22, 40 }, { 48, 16 }, { 16, 48 },
+ { 39, 23 }, { 23, 39 }, { 44, 19 }, { 19, 44 },
+ { 38, 24 }, { 24, 38 }, { 43, 20 }, { 20, 43 },
+ { 47, 17 }, { 17, 47 }, { 37, 25 }, { 25, 37 },
+ { 36, 26 }, { 26, 36 }, { 35, 27 }, { 27, 35 },
+ { 42, 21 }, { 21, 42 }, { 34, 28 }, { 28, 34 },
+ { 46, 18 }, { 18, 46 }, { 33, 29 }, { 29, 33 },
+ { 32, 30 }, { 30, 32 }, { 41, 22 }, { 22, 41 },
+ { 49, 16 }, { 16, 49 }, { 45, 19 }, { 19, 45 },
+ { 40, 23 }, { 23, 40 }, { 39, 24 }, { 24, 39 },
+ { 48, 17 }, { 17, 48 }, { 44, 20 }, { 20, 44 },
+ { 38, 25 }, { 25, 38 }, { 37, 26 }, { 26, 37 },
+ { 43, 21 }, { 21, 43 }, { 36, 27 }, { 27, 36 },
+ { 47, 18 }, { 18, 47 }, { 35, 28 }, { 28, 35 },
+ { 34, 29 }, { 29, 34 }, { 42, 22 }, { 22, 42 },
+ { 33, 30 }, { 30, 33 }, { 50, 16 }, { 16, 50 },
+ { 32, 31 }, { 31, 32 }, { 46, 19 }, { 19, 46 },
+ { 41, 23 }, { 23, 41 }, { 40, 24 }, { 24, 40 },
+ { 49, 17 }, { 17, 49 }, { 45, 20 }, { 20, 45 },
+ { 39, 25 }, { 25, 39 }, { 38, 26 }, { 26, 38 },
+ { 44, 21 }, { 21, 44 }, { 48, 18 }, { 18, 48 },
+ { 37, 27 }, { 27, 37 }, { 36, 28 }, { 28, 36 },
+ { 51, 16 }, { 16, 51 }, { 43, 22 }, { 22, 43 },
+ { 35, 29 }, { 29, 35 }, { 34, 30 }, { 30, 34 },
+ { 33, 31 }, { 31, 33 }, { 32, 32 }, { 47, 19 },
+ { 19, 47 }, { 42, 23 }, { 23, 42 }, { 50, 17 },
+ { 17, 50 }, { 41, 24 }, { 24, 41 }, { 46, 20 },
+ { 20, 46 }, { 40, 25 }, { 25, 40 }, { 45, 21 },
+ { 21, 45 }, { 49, 18 }, { 18, 49 }, { 39, 26 },
+ { 26, 39 }, { 38, 27 }, { 27, 38 }, { 52, 16 },
+ { 16, 52 }, { 44, 22 }, { 22, 44 }, { 37, 28 },
+ { 28, 37 }, { 36, 29 }, { 29, 36 }, { 48, 19 },
+ { 19, 48 }, { 35, 30 }, { 30, 35 }, { 34, 31 },
+ { 31, 34 }, { 33, 32 }, { 32, 33 }, { 43, 23 },
+ { 23, 43 }, { 51, 17 }, { 17, 51 }, { 47, 20 },
+ { 20, 47 }, { 42, 24 }, { 24, 42 }, { 41, 25 },
+ { 25, 41 }, { 50, 18 }, { 18, 50 }, { 46, 21 },
+ { 21, 46 }, { 40, 26 }, { 26, 40 }, { 53, 16 },
+ { 16, 53 }, { 39, 27 }, { 27, 39 }, { 45, 22 },
+ { 22, 45 }, { 38, 28 }, { 28, 38 }, { 49, 19 },
+ { 19, 49 }, { 37, 29 }, { 29, 37 }, { 36, 30 },
+ { 30, 36 }, { 44, 23 }, { 23, 44 }, { 52, 17 },
+ { 17, 52 }, { 35, 31 }, { 31, 35 }, { 34, 32 },
+ { 32, 34 }, { 33, 33 }, { 48, 20 }, { 20, 48 },
+ { 43, 24 }, { 24, 43 }, { 42, 25 }, { 25, 42 },
+ { 51, 18 }, { 18, 51 }, { 47, 21 }, { 21, 47 },
+ { 54, 16 }, { 16, 54 }, { 41, 26 }, { 26, 41 },
+ { 40, 27 }, { 27, 40 }, { 46, 22 }, { 22, 46 },
+ { 50, 19 }, { 19, 50 }, { 39, 28 }, { 28, 39 },
+ { 38, 29 }, { 29, 38 }, { 53, 17 }, { 17, 53 },
+ { 45, 23 }, { 23, 45 }, { 37, 30 }, { 30, 37 },
+ { 36, 31 }, { 31, 36 }, { 49, 20 }, { 20, 49 },
+ { 35, 32 }, { 32, 35 }, { 34, 33 }, { 33, 34 },
+ { 44, 24 }, { 24, 44 }, { 52, 18 }, { 18, 52 },
+ { 43, 25 }, { 25, 43 }, { 48, 21 }, { 21, 48 },
+ { 55, 16 }, { 16, 55 }, { 42, 26 }, { 26, 42 },
+ { 47, 22 }, { 22, 47 }, { 51, 19 }, { 19, 51 },
+ { 41, 27 }, { 27, 41 }, { 40, 28 }, { 28, 40 },
+ { 54, 17 }, { 17, 54 }, { 46, 23 }, { 23, 46 },
+ { 39, 29 }, { 29, 39 }, { 50, 20 }, { 20, 50 },
+ { 38, 30 }, { 30, 38 }, { 37, 31 }, { 31, 37 },
+ { 45, 24 }, { 24, 45 }, { 36, 32 }, { 32, 36 },
+ { 35, 33 }, { 33, 35 }, { 34, 34 }, { 53, 18 },
+ { 18, 53 }, { 56, 16 }, { 16, 56 }, { 49, 21 },
+ { 21, 49 }, { 44, 25 }, { 25, 44 }, { 43, 26 },
+ { 26, 43 }, { 48, 22 }, { 22, 48 }, { 52, 19 },
+ { 19, 52 }, { 42, 27 }, { 27, 42 }, { 55, 17 },
+ { 17, 55 }, { 41, 28 }, { 28, 41 }, { 47, 23 },
+ { 23, 47 }, { 40, 29 }, { 29, 40 }, { 51, 20 },
+ { 20, 51 }, { 39, 30 }, { 30, 39 }, { 46, 24 },
+ { 24, 46 }, { 38, 31 }, { 31, 38 }, { 54, 18 },
+ { 18, 54 }, { 37, 32 }, { 32, 37 }, { 57, 16 },
+ { 16, 57 }, { 36, 33 }, { 33, 36 }, { 35, 34 },
+ { 34, 35 }, { 50, 21 }, { 21, 50 }, { 45, 25 },
+ { 25, 45 }, { 44, 26 }, { 26, 44 }, { 53, 19 },
+ { 19, 53 }, { 49, 22 }, { 22, 49 }, { 56, 17 },
+ { 17, 56 }, { 43, 27 }, { 27, 43 }, { 48, 23 },
+ { 23, 48 }, { 42, 28 }, { 28, 42 }, { 52, 20 },
+ { 20, 52 }, { 41, 29 }, { 29, 41 }, { 40, 30 },
+ { 30, 40 }, { 47, 24 }, { 24, 47 }, { 55, 18 },
+ { 18, 55 }, { 58, 16 }, { 16, 58 }, { 39, 31 },
+ { 31, 39 }, { 51, 21 }, { 21, 51 }, { 38, 32 },
+ { 32, 38 }, { 37, 33 }, { 33, 37 }, { 46, 25 },
+ { 25, 46 }, { 36, 34 }, { 34, 36 }, { 35, 35 },
+ { 54, 19 }, { 19, 54 }, { 45, 26 }, { 26, 45 },
+ { 50, 22 }, { 22, 50 }, { 57, 17 }, { 17, 57 },
+ { 44, 27 }, { 27, 44 }, { 49, 23 }, { 23, 49 },
+ { 53, 20 }, { 20, 53 }, { 43, 28 }, { 28, 43 },
+ { 42, 29 }, { 29, 42 }, { 56, 18 }, { 18, 56 },
+ { 59, 16 }, { 16, 59 }, { 48, 24 }, { 24, 48 },
+ { 41, 30 }, { 30, 41 }, { 52, 21 }, { 21, 52 },
+ { 40, 31 }, { 31, 40 }, { 39, 32 }, { 32, 39 },
+ { 47, 25 }, { 25, 47 }, { 38, 33 }, { 33, 38 },
+ { 37, 34 }, { 34, 37 }, { 36, 35 }, { 35, 36 },
+ { 55, 19 }, { 19, 55 }, { 51, 22 }, { 22, 51 },
+ { 46, 26 }, { 26, 46 }, { 58, 17 }, { 17, 58 },
+ { 45, 27 }, { 27, 45 }, { 50, 23 }, { 23, 50 },
+ { 54, 20 }, { 20, 54 }, { 44, 28 }, { 28, 44 },
+ { 57, 18 }, { 18, 57 }, { 60, 16 }, { 16, 60 },
+ { 43, 29 }, { 29, 43 }, { 49, 24 }, { 24, 49 },
+ { 53, 21 }, { 21, 53 }, { 42, 30 }, { 30, 42 },
+ { 41, 31 }, { 31, 41 }, { 48, 25 }, { 25, 48 },
+ { 40, 32 }, { 32, 40 }, { 56, 19 }, { 19, 56 },
+ { 39, 33 }, { 33, 39 }, { 59, 17 }, { 17, 59 },
+ { 52, 22 }, { 22, 52 }, { 38, 34 }, { 34, 38 },
+ { 37, 35 }, { 35, 37 }, { 36, 36 }, { 47, 26 },
+ { 26, 47 }, { 46, 27 }, { 27, 46 }, { 55, 20 },
+ { 20, 55 }, { 51, 23 }, { 23, 51 }, { 45, 28 },
+ { 28, 45 }, { 61, 16 }, { 16, 61 }, { 58, 18 },
+ { 18, 58 }, { 50, 24 }, { 24, 50 }, { 44, 29 },
+ { 29, 44 }, { 54, 21 }, { 21, 54 }, { 43, 30 },
+ { 30, 43 }, { 49, 25 }, { 25, 49 }, { 42, 31 },
+ { 31, 42 }, { 57, 19 }, { 19, 57 }, { 60, 17 },
+ { 17, 60 }, { 41, 32 }, { 32, 41 }, { 53, 22 },
+ { 22, 53 }, { 40, 33 }, { 33, 40 }, { 48, 26 },
+ { 26, 48 }, { 39, 34 }, { 34, 39 }, { 38, 35 },
+ { 35, 38 }, { 37, 36 }, { 36, 37 }, { 56, 20 },
+ { 20, 56 }, { 47, 27 }, { 27, 47 }, { 52, 23 },
+ { 23, 52 }, { 62, 16 }, { 16, 62 }, { 59, 18 },
+ { 18, 59 }, { 46, 28 }, { 28, 46 }, { 51, 24 },
+ { 24, 51 }, { 55, 21 }, { 21, 55 }, { 45, 29 },
+ { 29, 45 }, { 44, 30 }, { 30, 44 }, { 58, 19 },
+ { 19, 58 }, { 50, 25 }, { 25, 50 }, { 61, 17 },
+ { 17, 61 }, { 43, 31 }, { 31, 43 }, { 54, 22 },
+ { 22, 54 }, { 42, 32 }, { 32, 42 }, { 49, 26 },
+ { 26, 49 }, { 41, 33 }, { 33, 41 }, { 40, 34 },
+ { 34, 40 }, { 57, 20 }, { 20, 57 }, { 39, 35 },
+ { 35, 39 }, { 38, 36 }, { 36, 38 }, { 37, 37 },
+ { 53, 23 }, { 23, 53 }, { 48, 27 }, { 27, 48 },
+ { 63, 16 }, { 16, 63 }, { 60, 18 }, { 18, 60 },
+ { 47, 28 }, { 28, 47 }, { 52, 24 }, { 24, 52 },
+ { 56, 21 }, { 21, 56 }, { 46, 29 }, { 29, 46 },
+ { 59, 19 }, { 19, 59 }, { 62, 17 }, { 17, 62 },
+ { 45, 30 }, { 30, 45 }, { 51, 25 }, { 25, 51 },
+ { 55, 22 }, { 22, 55 }, { 44, 31 }, { 31, 44 },
+ { 43, 32 }, { 32, 43 }, { 50, 26 }, { 26, 50 },
+ { 42, 33 }, { 33, 42 }, { 58, 20 }, { 20, 58 },
+ { 41, 34 }, { 34, 41 }, { 54, 23 }, { 23, 54 },
+ { 61, 18 }, { 18, 61 }, { 40, 35 }, { 35, 40 },
+ { 49, 27 }, { 27, 49 }, { 39, 36 }, { 36, 39 },
+ { 38, 37 }, { 37, 38 }, { 48, 28 }, { 28, 48 },
+ { 57, 21 }, { 21, 57 }, { 53, 24 }, { 24, 53 },
+ { 47, 29 }, { 29, 47 }, { 60, 19 }, { 19, 60 },
+ { 63, 17 }, { 17, 63 }, { 52, 25 }, { 25, 52 },
+ { 46, 30 }, { 30, 46 }, { 56, 22 }, { 22, 56 },
+ { 45, 31 }, { 31, 45 }, { 51, 26 }, { 26, 51 },
+ { 44, 32 }, { 32, 44 }, { 59, 20 }, { 20, 59 },
+ { 62, 18 }, { 18, 62 }, { 43, 33 }, { 33, 43 },
+ { 55, 23 }, { 23, 55 }, { 42, 34 }, { 34, 42 },
+ { 50, 27 }, { 27, 50 }, { 41, 35 }, { 35, 41 },
+ { 40, 36 }, { 36, 40 }, { 39, 37 }, { 37, 39 },
+ { 38, 38 }, { 58, 21 }, { 21, 58 }, { 49, 28 },
+ { 28, 49 }, { 54, 24 }, { 24, 54 }, { 61, 19 },
+ { 19, 61 }, { 48, 29 }, { 29, 48 }, { 53, 25 },
+ { 25, 53 }, { 57, 22 }, { 22, 57 }, { 47, 30 },
+ { 30, 47 }, { 46, 31 }, { 31, 46 }, { 60, 20 },
+ { 20, 60 }, { 52, 26 }, { 26, 52 }, { 63, 18 },
+ { 18, 63 }, { 45, 32 }, { 32, 45 }, { 56, 23 },
+ { 23, 56 }, { 44, 33 }, { 33, 44 }, { 51, 27 },
+ { 27, 51 }, { 43, 34 }, { 34, 43 }, { 42, 35 },
+ { 35, 42 }, { 59, 21 }, { 21, 59 }, { 41, 36 },
+ { 36, 41 }, { 55, 24 }, { 24, 55 }, { 40, 37 },
+ { 37, 40 }, { 50, 28 }, { 28, 50 }, { 39, 38 },
+ { 38, 39 }, { 62, 19 }, { 19, 62 }, { 49, 29 },
+ { 29, 49 }, { 54, 25 }, { 25, 54 }, { 58, 22 },
+ { 22, 58 }, { 48, 30 }, { 30, 48 }, { 61, 20 },
+ { 20, 61 }, { 47, 31 }, { 31, 47 }, { 53, 26 },
+ { 26, 53 }, { 57, 23 }, { 23, 57 }, { 46, 32 },
+ { 32, 46 }, { 52, 27 }, { 27, 52 }, { 45, 33 },
+ { 33, 45 }, { 60, 21 }, { 21, 60 }, { 44, 34 },
+ { 34, 44 }, { 56, 24 }, { 24, 56 }, { 43, 35 },
+ { 35, 43 }, { 63, 19 }, { 19, 63 }, { 51, 28 },
+ { 28, 51 }, { 42, 36 }, { 36, 42 }, { 41, 37 },
+ { 37, 41 }, { 40, 38 }, { 38, 40 }, { 39, 39 },
+ { 50, 29 }, { 29, 50 }, { 55, 25 }, { 25, 55 },
+ { 59, 22 }, { 22, 59 }, { 49, 30 }, { 30, 49 },
+ { 62, 20 }, { 20, 62 }, { 54, 26 }, { 26, 54 },
+ { 48, 31 }, { 31, 48 }, { 58, 23 }, { 23, 58 },
+ { 47, 32 }, { 32, 47 }, { 53, 27 }, { 27, 53 },
+ { 46, 33 }, { 33, 46 }, { 61, 21 }, { 21, 61 },
+ { 57, 24 }, { 24, 57 }, { 45, 34 }, { 34, 45 },
+ { 52, 28 }, { 28, 52 }, { 44, 35 }, { 35, 44 },
+ { 43, 36 }, { 36, 43 }, { 42, 37 }, { 37, 42 },
+ { 41, 38 }, { 38, 41 }, { 40, 39 }, { 39, 40 },
+ { 60, 22 }, { 22, 60 }, { 51, 29 }, { 29, 51 },
+ { 56, 25 }, { 25, 56 }, { 63, 20 }, { 20, 63 },
+ { 50, 30 }, { 30, 50 }, { 55, 26 }, { 26, 55 },
+ { 59, 23 }, { 23, 59 }, { 49, 31 }, { 31, 49 },
+ { 48, 32 }, { 32, 48 }, { 54, 27 }, { 27, 54 },
+ { 62, 21 }, { 21, 62 }, { 47, 33 }, { 33, 47 },
+ { 58, 24 }, { 24, 58 }, { 46, 34 }, { 34, 46 },
+ { 53, 28 }, { 28, 53 }, { 45, 35 }, { 35, 45 },
+ { 44, 36 }, { 36, 44 }, { 61, 22 }, { 22, 61 },
+ { 57, 25 }, { 25, 57 }, { 52, 29 }, { 29, 52 },
+ { 43, 37 }, { 37, 43 }, { 42, 38 }, { 38, 42 },
+ { 41, 39 }, { 39, 41 }, { 40, 40 }, { 51, 30 },
+ { 30, 51 }, { 56, 26 }, { 26, 56 }, { 60, 23 },
+ { 23, 60 }, { 50, 31 }, { 31, 50 }, { 63, 21 },
+ { 21, 63 }, { 55, 27 }, { 27, 55 }, { 49, 32 },
+ { 32, 49 }, { 59, 24 }, { 24, 59 }, { 48, 33 },
+ { 33, 48 }, { 54, 28 }, { 28, 54 }, { 47, 34 },
+ { 34, 47 }, { 62, 22 }, { 22, 62 }, { 46, 35 },
+ { 35, 46 }, { 58, 25 }, { 25, 58 }, { 45, 36 },
+ { 36, 45 }, { 53, 29 }, { 29, 53 }, { 44, 37 },
+ { 37, 44 }, { 43, 38 }, { 38, 43 }, { 42, 39 },
+ { 39, 42 }, { 41, 40 }, { 40, 41 }, { 52, 30 },
+ { 30, 52 }, { 57, 26 }, { 26, 57 }, { 61, 23 },
+ { 23, 61 }, { 51, 31 }, { 31, 51 }, { 56, 27 },
+ { 27, 56 }, { 50, 32 }, { 32, 50 }, { 60, 24 },
+ { 24, 60 }, { 49, 33 }, { 33, 49 }, { 55, 28 },
+ { 28, 55 }, { 63, 22 }, { 22, 63 }, { 48, 34 },
+ { 34, 48 }, { 59, 25 }, { 25, 59 }, { 47, 35 },
+ { 35, 47 }, { 54, 29 }, { 29, 54 }, { 46, 36 },
+ { 36, 46 }, { 45, 37 }, { 37, 45 }, { 44, 38 },
+ { 38, 44 }, { 53, 30 }, { 30, 53 }, { 62, 23 },
+ { 23, 62 }, { 58, 26 }, { 26, 58 }, { 43, 39 },
+ { 39, 43 }, { 42, 40 }, { 40, 42 }, { 41, 41 },
+ { 52, 31 }, { 31, 52 }, { 57, 27 }, { 27, 57 },
+ { 61, 24 }, { 24, 61 }, { 51, 32 }, { 32, 51 },
+ { 50, 33 }, { 33, 50 }, { 56, 28 }, { 28, 56 },
+ { 49, 34 }, { 34, 49 }, { 60, 25 }, { 25, 60 },
+ { 48, 35 }, { 35, 48 }, { 55, 29 }, { 29, 55 },
+ { 47, 36 }, { 36, 47 }, { 63, 23 }, { 23, 63 },
+ { 46, 37 }, { 37, 46 }, { 59, 26 }, { 26, 59 },
+ { 54, 30 }, { 30, 54 }, { 45, 38 }, { 38, 45 },
+ { 44, 39 }, { 39, 44 }, { 43, 40 }, { 40, 43 },
+ { 42, 41 }, { 41, 42 }, { 53, 31 }, { 31, 53 },
+ { 58, 27 }, { 27, 58 }, { 62, 24 }, { 24, 62 },
+ { 52, 32 }, { 32, 52 }, { 57, 28 }, { 28, 57 },
+ { 51, 33 }, { 33, 51 }, { 61, 25 }, { 25, 61 },
+ { 50, 34 }, { 34, 50 }, { 56, 29 }, { 29, 56 },
+ { 49, 35 }, { 35, 49 }, { 48, 36 }, { 36, 48 },
+ { 60, 26 }, { 26, 60 }, { 55, 30 }, { 30, 55 },
+ { 47, 37 }, { 37, 47 }, { 46, 38 }, { 38, 46 },
+ { 45, 39 }, { 39, 45 }, { 44, 40 }, { 40, 44 },
+ { 54, 31 }, { 31, 54 }, { 43, 41 }, { 41, 43 },
+ { 42, 42 }, { 59, 27 }, { 27, 59 }, { 63, 24 },
+ { 24, 63 }, { 53, 32 }, { 32, 53 }, { 58, 28 },
+ { 28, 58 }, { 52, 33 }, { 33, 52 }, { 62, 25 },
+ { 25, 62 }, { 51, 34 }, { 34, 51 }, { 57, 29 },
+ { 29, 57 }, { 50, 35 }, { 35, 50 }, { 61, 26 },
+ { 26, 61 }, { 49, 36 }, { 36, 49 }, { 56, 30 },
+ { 30, 56 }, { 48, 37 }, { 37, 48 }, { 47, 38 },
+ { 38, 47 }, { 55, 31 }, { 31, 55 }, { 60, 27 },
+ { 27, 60 }, { 46, 39 }, { 39, 46 }, { 45, 40 },
+ { 40, 45 }, { 44, 41 }, { 41, 44 }, { 43, 42 },
+ { 42, 43 }, { 54, 32 }, { 32, 54 }, { 59, 28 },
+ { 28, 59 }, { 63, 25 }, { 25, 63 }, { 53, 33 },
+ { 33, 53 }, { 52, 34 }, { 34, 52 }, { 58, 29 },
+ { 29, 58 }, { 51, 35 }, { 35, 51 }, { 62, 26 },
+ { 26, 62 }, { 57, 30 }, { 30, 57 }, { 50, 36 },
+ { 36, 50 }, { 49, 37 }, { 37, 49 }, { 61, 27 },
+ { 27, 61 }, { 48, 38 }, { 38, 48 }, { 56, 31 },
+ { 31, 56 }, { 47, 39 }, { 39, 47 }, { 46, 40 },
+ { 40, 46 }, { 45, 41 }, { 41, 45 }, { 44, 42 },
+ { 42, 44 }, { 43, 43 }, { 55, 32 }, { 32, 55 },
+ { 60, 28 }, { 28, 60 }, { 54, 33 }, { 33, 54 },
+ { 59, 29 }, { 29, 59 }, { 53, 34 }, { 34, 53 },
+ { 63, 26 }, { 26, 63 }, { 52, 35 }, { 35, 52 },
+ { 58, 30 }, { 30, 58 }, { 51, 36 }, { 36, 51 },
+ { 50, 37 }, { 37, 50 }, { 62, 27 }, { 27, 62 },
+ { 57, 31 }, { 31, 57 }, { 49, 38 }, { 38, 49 },
+ { 48, 39 }, { 39, 48 }, { 47, 40 }, { 40, 47 },
+ { 56, 32 }, { 32, 56 }, { 46, 41 }, { 41, 46 },
+ { 61, 28 }, { 28, 61 }, { 45, 42 }, { 42, 45 },
+ { 44, 43 }, { 43, 44 }, { 55, 33 }, { 33, 55 },
+ { 60, 29 }, { 29, 60 }, { 54, 34 }, { 34, 54 },
+ { 53, 35 }, { 35, 53 }, { 59, 30 }, { 30, 59 },
+ { 52, 36 }, { 36, 52 }, { 63, 27 }, { 27, 63 },
+ { 51, 37 }, { 37, 51 }, { 58, 31 }, { 31, 58 },
+ { 50, 38 }, { 38, 50 }, { 49, 39 }, { 39, 49 },
+ { 57, 32 }, { 32, 57 }, { 62, 28 }, { 28, 62 },
+ { 48, 40 }, { 40, 48 }, { 47, 41 }, { 41, 47 },
+ { 46, 42 }, { 42, 46 }, { 45, 43 }, { 43, 45 },
+ { 44, 44 }, { 56, 33 }, { 33, 56 }, { 61, 29 },
+ { 29, 61 }, { 55, 34 }, { 34, 55 }, { 54, 35 },
+ { 35, 54 }, { 60, 30 }, { 30, 60 }, { 53, 36 },
+ { 36, 53 }, { 59, 31 }, { 31, 59 }, { 52, 37 },
+ { 37, 52 }, { 51, 38 }, { 38, 51 }, { 63, 28 },
+ { 28, 63 }, { 58, 32 }, { 32, 58 }, { 50, 39 },
+ { 39, 50 }, { 49, 40 }, { 40, 49 }, { 48, 41 },
+ { 41, 48 }, { 57, 33 }, { 33, 57 }, { 47, 42 },
+ { 42, 47 }, { 46, 43 }, { 43, 46 }, { 45, 44 },
+ { 44, 45 }, { 62, 29 }, { 29, 62 }, { 56, 34 },
+ { 34, 56 }, { 61, 30 }, { 30, 61 }, { 55, 35 },
+ { 35, 55 }, { 54, 36 }, { 36, 54 }, { 60, 31 },
+ { 31, 60 }, { 53, 37 }, { 37, 53 }, { 52, 38 },
+ { 38, 52 }, { 59, 32 }, { 32, 59 }, { 51, 39 },
+ { 39, 51 }, { 50, 40 }, { 40, 50 }, { 58, 33 },
+ { 33, 58 }, { 49, 41 }, { 41, 49 }, { 63, 29 },
+ { 29, 63 }, { 48, 42 }, { 42, 48 }, { 47, 43 },
+ { 43, 47 }, { 46, 44 }, { 44, 46 }, { 45, 45 },
+ { 57, 34 }, { 34, 57 }, { 62, 30 }, { 30, 62 },
+ { 56, 35 }, { 35, 56 }, { 55, 36 }, { 36, 55 },
+ { 61, 31 }, { 31, 61 }, { 54, 37 }, { 37, 54 },
+ { 60, 32 }, { 32, 60 }, { 53, 38 }, { 38, 53 },
+ { 52, 39 }, { 39, 52 }, { 51, 40 }, { 40, 51 },
+ { 59, 33 }, { 33, 59 }, { 50, 41 }, { 41, 50 },
+ { 49, 42 }, { 42, 49 }, { 48, 43 }, { 43, 48 },
+ { 58, 34 }, { 34, 58 }, { 47, 44 }, { 44, 47 },
+ { 46, 45 }, { 45, 46 }, { 63, 30 }, { 30, 63 },
+ { 57, 35 }, { 35, 57 }, { 62, 31 }, { 31, 62 },
+ { 56, 36 }, { 36, 56 }, { 55, 37 }, { 37, 55 },
+ { 61, 32 }, { 32, 61 }, { 54, 38 }, { 38, 54 },
+ { 53, 39 }, { 39, 53 }, { 60, 33 }, { 33, 60 },
+ { 52, 40 }, { 40, 52 }, { 51, 41 }, { 41, 51 },
+ { 50, 42 }, { 42, 50 }, { 59, 34 }, { 34, 59 },
+ { 49, 43 }, { 43, 49 }, { 48, 44 }, { 44, 48 },
+ { 47, 45 }, { 45, 47 }, { 46, 46 }, { 58, 35 },
+ { 35, 58 }, { 63, 31 }, { 31, 63 }, { 57, 36 },
+ { 36, 57 }, { 56, 37 }, { 37, 56 }, { 62, 32 },
+ { 32, 62 }, { 55, 38 }, { 38, 55 }, { 54, 39 },
+ { 39, 54 }, { 61, 33 }, { 33, 61 }, { 53, 40 },
+ { 40, 53 }, { 52, 41 }, { 41, 52 }, { 60, 34 },
+ { 34, 60 }, { 51, 42 }, { 42, 51 }, { 50, 43 },
+ { 43, 50 }, { 49, 44 }, { 44, 49 }, { 48, 45 },
+ { 45, 48 }, { 59, 35 }, { 35, 59 }, { 47, 46 },
+ { 46, 47 }, { 58, 36 }, { 36, 58 }, { 57, 37 },
+ { 37, 57 }, { 63, 32 }, { 32, 63 }, { 56, 38 },
+ { 38, 56 }, { 62, 33 }, { 33, 62 }, { 55, 39 },
+ { 39, 55 }, { 54, 40 }, { 40, 54 }, { 61, 34 },
+ { 34, 61 }, { 53, 41 }, { 41, 53 }, { 52, 42 },
+ { 42, 52 }, { 51, 43 }, { 43, 51 }, { 60, 35 },
+ { 35, 60 }, { 50, 44 }, { 44, 50 }, { 49, 45 },
+ { 45, 49 }, { 48, 46 }, { 46, 48 }, { 47, 47 },
+ { 59, 36 }, { 36, 59 }, { 58, 37 }, { 37, 58 },
+ { 57, 38 }, { 38, 57 }, { 63, 33 }, { 33, 63 },
+ { 56, 39 }, { 39, 56 }, { 55, 40 }, { 40, 55 },
+ { 62, 34 }, { 34, 62 }, { 54, 41 }, { 41, 54 },
+ { 53, 42 }, { 42, 53 }, { 61, 35 }, { 35, 61 },
+ { 52, 43 }, { 43, 52 }, { 51, 44 }, { 44, 51 },
+ { 50, 45 }, { 45, 50 }, { 49, 46 }, { 46, 49 },
+ { 48, 47 }, { 47, 48 }, { 60, 36 }, { 36, 60 },
+ { 59, 37 }, { 37, 59 }, { 58, 38 }, { 38, 58 },
+ { 57, 39 }, { 39, 57 }, { 56, 40 }, { 40, 56 },
+ { 63, 34 }, { 34, 63 }, { 55, 41 }, { 41, 55 },
+ { 54, 42 }, { 42, 54 }, { 62, 35 }, { 35, 62 },
+ { 53, 43 }, { 43, 53 }, { 52, 44 }, { 44, 52 },
+ { 51, 45 }, { 45, 51 }, { 61, 36 }, { 36, 61 },
+ { 50, 46 }, { 46, 50 }, { 49, 47 }, { 47, 49 },
+ { 48, 48 }, { 60, 37 }, { 37, 60 }, { 59, 38 },
+ { 38, 59 }, { 58, 39 }, { 39, 58 }, { 57, 40 },
+ { 40, 57 }, { 56, 41 }, { 41, 56 }, { 63, 35 },
+ { 35, 63 }, { 55, 42 }, { 42, 55 }, { 54, 43 },
+ { 43, 54 }, { 53, 44 }, { 44, 53 }, { 62, 36 },
+ { 36, 62 }, { 52, 45 }, { 45, 52 }, { 51, 46 },
+ { 46, 51 }, { 50, 47 }, { 47, 50 }, { 49, 48 },
+ { 48, 49 }, { 61, 37 }, { 37, 61 }, { 60, 38 },
+ { 38, 60 }, { 59, 39 }, { 39, 59 }, { 58, 40 },
+ { 40, 58 }, { 57, 41 }, { 41, 57 }, { 56, 42 },
+ { 42, 56 }, { 55, 43 }, { 43, 55 }, { 63, 36 },
+ { 36, 63 }, { 54, 44 }, { 44, 54 }, { 53, 45 },
+ { 45, 53 }, { 52, 46 }, { 46, 52 }, { 62, 37 },
+ { 37, 62 }, { 51, 47 }, { 47, 51 }, { 50, 48 },
+ { 48, 50 }, { 49, 49 }, { 61, 38 }, { 38, 61 },
+ { 60, 39 }, { 39, 60 }, { 59, 40 }, { 40, 59 },
+ { 58, 41 }, { 41, 58 }, { 57, 42 }, { 42, 57 },
+ { 56, 43 }, { 43, 56 }, { 55, 44 }, { 44, 55 },
+ { 54, 45 }, { 45, 54 }, { 63, 37 }, { 37, 63 },
+ { 53, 46 }, { 46, 53 }, { 52, 47 }, { 47, 52 },
+ { 51, 48 }, { 48, 51 }, { 50, 49 }, { 49, 50 },
+ { 62, 38 }, { 38, 62 }, { 61, 39 }, { 39, 61 },
+ { 60, 40 }, { 40, 60 }, { 59, 41 }, { 41, 59 },
+ { 58, 42 }, { 42, 58 }, { 57, 43 }, { 43, 57 },
+ { 56, 44 }, { 44, 56 }, { 55, 45 }, { 45, 55 },
+ { 54, 46 }, { 46, 54 }, { 53, 47 }, { 47, 53 },
+ { 52, 48 }, { 48, 52 }, { 63, 38 }, { 38, 63 },
+ { 51, 49 }, { 49, 51 }, { 50, 50 }, { 62, 39 },
+ { 39, 62 }, { 61, 40 }, { 40, 61 }, { 60, 41 },
+ { 41, 60 }, { 59, 42 }, { 42, 59 }, { 58, 43 },
+ { 43, 58 }, { 57, 44 }, { 44, 57 }, { 56, 45 },
+ { 45, 56 }, { 55, 46 }, { 46, 55 }, { 54, 47 },
+ { 47, 54 }, { 53, 48 }, { 48, 53 }, { 52, 49 },
+ { 49, 52 }, { 51, 50 }, { 50, 51 }, { 63, 39 },
+ { 39, 63 }, { 62, 40 }, { 40, 62 }, { 61, 41 },
+ { 41, 61 }, { 60, 42 }, { 42, 60 }, { 59, 43 },
+ { 43, 59 }, { 58, 44 }, { 44, 58 }, { 57, 45 },
+ { 45, 57 }, { 56, 46 }, { 46, 56 }, { 55, 47 },
+ { 47, 55 }, { 54, 48 }, { 48, 54 }, { 53, 49 },
+ { 49, 53 }, { 52, 50 }, { 50, 52 }, { 51, 51 },
+ { 63, 40 }, { 40, 63 }, { 62, 41 }, { 41, 62 },
+ { 61, 42 }, { 42, 61 }, { 60, 43 }, { 43, 60 },
+ { 59, 44 }, { 44, 59 }, { 58, 45 }, { 45, 58 },
+ { 57, 46 }, { 46, 57 }, { 56, 47 }, { 47, 56 },
+ { 55, 48 }, { 48, 55 }, { 54, 49 }, { 49, 54 },
+ { 53, 50 }, { 50, 53 }, { 52, 51 }, { 51, 52 },
+ { 63, 41 }, { 41, 63 }, { 62, 42 }, { 42, 62 },
+ { 61, 43 }, { 43, 61 }, { 60, 44 }, { 44, 60 },
+ { 59, 45 }, { 45, 59 }, { 58, 46 }, { 46, 58 },
+ { 57, 47 }, { 47, 57 }, { 56, 48 }, { 48, 56 },
+ { 55, 49 }, { 49, 55 }, { 54, 50 }, { 50, 54 },
+ { 53, 51 }, { 51, 53 }, { 52, 52 }, { 63, 42 },
+ { 42, 63 }, { 62, 43 }, { 43, 62 }, { 61, 44 },
+ { 44, 61 }, { 60, 45 }, { 45, 60 }, { 59, 46 },
+ { 46, 59 }, { 58, 47 }, { 47, 58 }, { 57, 48 },
+ { 48, 57 }, { 56, 49 }, { 49, 56 }, { 55, 50 },
+ { 50, 55 }, { 54, 51 }, { 51, 54 }, { 53, 52 },
+ { 52, 53 }, { 63, 43 }, { 43, 63 }, { 62, 44 },
+ { 44, 62 }, { 61, 45 }, { 45, 61 }, { 60, 46 },
+ { 46, 60 }, { 59, 47 }, { 47, 59 }, { 58, 48 },
+ { 48, 58 }, { 57, 49 }, { 49, 57 }, { 56, 50 },
+ { 50, 56 }, { 55, 51 }, { 51, 55 }, { 54, 52 },
+ { 52, 54 }, { 53, 53 }, { 63, 44 }, { 44, 63 },
+ { 62, 45 }, { 45, 62 }, { 61, 46 }, { 46, 61 },
+ { 60, 47 }, { 47, 60 }, { 59, 48 }, { 48, 59 },
+ { 58, 49 }, { 49, 58 }, { 57, 50 }, { 50, 57 },
+ { 56, 51 }, { 51, 56 }, { 55, 52 }, { 52, 55 },
+ { 54, 53 }, { 53, 54 }, { 63, 45 }, { 45, 63 },
+ { 62, 46 }, { 46, 62 }, { 61, 47 }, { 47, 61 },
+ { 60, 48 }, { 48, 60 }, { 59, 49 }, { 49, 59 },
+ { 58, 50 }, { 50, 58 }, { 57, 51 }, { 51, 57 },
+ { 56, 52 }, { 52, 56 }, { 55, 53 }, { 53, 55 },
+ { 54, 54 }, { 63, 46 }, { 46, 63 }, { 62, 47 },
+ { 47, 62 }, { 61, 48 }, { 48, 61 }, { 60, 49 },
+ { 49, 60 }, { 59, 50 }, { 50, 59 }, { 58, 51 },
+ { 51, 58 }, { 57, 52 }, { 52, 57 }, { 56, 53 },
+ { 53, 56 }, { 55, 54 }, { 54, 55 }, { 63, 47 },
+ { 47, 63 }, { 62, 48 }, { 48, 62 }, { 61, 49 },
+ { 49, 61 }, { 60, 50 }, { 50, 60 }, { 59, 51 },
+ { 51, 59 }, { 58, 52 }, { 52, 58 }, { 57, 53 },
+ { 53, 57 }, { 56, 54 }, { 54, 56 }, { 55, 55 },
+ { 63, 48 }, { 48, 63 }, { 62, 49 }, { 49, 62 },
+ { 61, 50 }, { 50, 61 }, { 60, 51 }, { 51, 60 },
+ { 59, 52 }, { 52, 59 }, { 58, 53 }, { 53, 58 },
+ { 57, 54 }, { 54, 57 }, { 56, 55 }, { 55, 56 },
+ { 63, 49 }, { 49, 63 }, { 62, 50 }, { 50, 62 },
+ { 61, 51 }, { 51, 61 }, { 60, 52 }, { 52, 60 },
+ { 59, 53 }, { 53, 59 }, { 58, 54 }, { 54, 58 },
+ { 57, 55 }, { 55, 57 }, { 56, 56 }, { 63, 50 },
+ { 50, 63 }, { 62, 51 }, { 51, 62 }, { 61, 52 },
+ { 52, 61 }, { 60, 53 }, { 53, 60 }, { 59, 54 },
+ { 54, 59 }, { 58, 55 }, { 55, 58 }, { 57, 56 },
+ { 56, 57 }, { 63, 51 }, { 51, 63 }, { 62, 52 },
+ { 52, 62 }, { 61, 53 }, { 53, 61 }, { 60, 54 },
+ { 54, 60 }, { 59, 55 }, { 55, 59 }, { 58, 56 },
+ { 56, 58 }, { 57, 57 }, { 63, 52 }, { 52, 63 },
+ { 62, 53 }, { 53, 62 }, { 61, 54 }, { 54, 61 },
+ { 60, 55 }, { 55, 60 }, { 59, 56 }, { 56, 59 },
+ { 58, 57 }, { 57, 58 }, { 63, 53 }, { 53, 63 },
+ { 62, 54 }, { 54, 62 }, { 61, 55 }, { 55, 61 },
+ { 60, 56 }, { 56, 60 }, { 59, 57 }, { 57, 59 },
+ { 58, 58 }, { 63, 54 }, { 54, 63 }, { 62, 55 },
+ { 55, 62 }, { 61, 56 }, { 56, 61 }, { 60, 57 },
+ { 57, 60 }, { 59, 58 }, { 58, 59 }, { 63, 55 },
+ { 55, 63 }, { 62, 56 }, { 56, 62 }, { 61, 57 },
+ { 57, 61 }, { 60, 58 }, { 58, 60 }, { 59, 59 },
+ { 63, 56 }, { 56, 63 }, { 62, 57 }, { 57, 62 },
+ { 61, 58 }, { 58, 61 }, { 60, 59 }, { 59, 60 },
+ { 63, 57 }, { 57, 63 }, { 62, 58 }, { 58, 62 },
+ { 61, 59 }, { 59, 61 }, { 60, 60 }, { 63, 58 },
+ { 58, 63 }, { 62, 59 }, { 59, 62 }, { 61, 60 },
+ { 60, 61 }, { 63, 59 }, { 59, 63 }, { 62, 60 },
+ { 60, 62 }, { 61, 61 }, { 63, 60 }, { 60, 63 },
+ { 62, 61 }, { 61, 62 }, { 63, 61 }, { 61, 63 },
+ { 62, 62 }, { 63, 62 }, { 62, 63 }, { 63, 63 }
+};
+
+/* Offsets into the zigzag scan tables where each sub-layout starts
+ * (presumably one per decomposition level -- confirm against daaladec.c). */
+static const int daala_layout_offset4x4[] = { 0 };
+static const int daala_layout_offset8x8[] = { 0, 8, 16 };
+static const int daala_layout_offset16x16[] = { 0, 32, 64 };
+static const int daala_layout_offset32x32[] = { 0, 128 };
+static const int daala_layout_offset64x64[] = { 0 };
+
+/* Band boundary offsets in zigzag order; the first band starts at
+ * coefficient 1 (index 0 appears to be DC, handled separately -- confirm). */
+static const int daala_band_offset4x4[] = { 1, 16 };
+static const int daala_band_offset8x8[] = { 1, 16, 24, 32, 64 };
+static const int daala_band_offset16x16[] = { 1, 16, 24, 32, 64, 96, 128, 256 };
+static const int daala_band_offset32x32[] = { 1, 16, 24, 32, 64, 96, 128, 256, 384, 512 };
+static const int daala_band_offset64x64[] = { 1, 16, 24, 32, 64, 96, 128, 256, 384, 512 };
+
+/* One entry per transform size (4x4 .. 64x64): the zigzag scan table and
+ * its element count, plus the layout and band offset lists above.  The
+ * stored band count is FF_ARRAY_ELEMS(...) - 1 because the offset list
+ * also contains the final band boundary. */
+const DaalaBandLayout daala_layouts[] = {
+    {
+        daala_zigzag4x4,
+        FF_ARRAY_ELEMS(daala_zigzag4x4),
+        daala_layout_offset4x4,
+        FF_ARRAY_ELEMS(daala_layout_offset4x4),
+        daala_band_offset4x4,
+        FF_ARRAY_ELEMS(daala_band_offset4x4) - 1
+    },
+    {
+        daala_zigzag8x8,
+        FF_ARRAY_ELEMS(daala_zigzag8x8),
+        daala_layout_offset8x8,
+        FF_ARRAY_ELEMS(daala_layout_offset8x8),
+        daala_band_offset8x8,
+        FF_ARRAY_ELEMS(daala_band_offset8x8) - 1
+    },
+    {
+        daala_zigzag16x16,
+        FF_ARRAY_ELEMS(daala_zigzag16x16),
+        daala_layout_offset16x16,
+        FF_ARRAY_ELEMS(daala_layout_offset16x16),
+        daala_band_offset16x16,
+        FF_ARRAY_ELEMS(daala_band_offset16x16) - 1
+    },
+    {
+        daala_zigzag32x32,
+        FF_ARRAY_ELEMS(daala_zigzag32x32),
+        daala_layout_offset32x32,
+        FF_ARRAY_ELEMS(daala_layout_offset32x32),
+        daala_band_offset32x32,
+        FF_ARRAY_ELEMS(daala_band_offset32x32) - 1
+    },
+    {
+        daala_zigzag64x64,
+        FF_ARRAY_ELEMS(daala_zigzag64x64),
+        daala_layout_offset64x64,
+        FF_ARRAY_ELEMS(daala_layout_offset64x64),
+        daala_band_offset64x64,
+        FF_ARRAY_ELEMS(daala_band_offset64x64) - 1
+    }
+};
+
+/* Basis magnitude compensation tables for the quantization matrices,
+ * luma plane, per transform size.
+ * NOTE(review): these arrays are double, so the literals must not carry
+ * an f suffix -- float-suffixed constants would silently truncate the
+ * values to single precision before the widening conversion. */
+static const double daala_basis_qm_mag_4x4_l[] = {
+    0.870774, 0.872037, 0.949493, 0.947936
+};
+
+static const double daala_basis_qm_mag_8x8_l[] = {
+    0.936496, 0.892830, 0.938452, 0.970087,
+    0.974272, 0.967954, 0.974035, 0.990480
+};
+static const double daala_basis_qm_mag_16x16_l[] = {
+    0.968807, 0.940969, 0.947977, 0.957741,
+    0.969762, 0.978644, 0.984885, 0.988009,
+    0.987424, 0.985569, 0.984215, 0.984462,
+    0.987205, 0.991415, 0.994985, 0.998237
+};
+static const double daala_basis_qm_mag_32x32_l[] = {
+    0.985068, 0.970006, 0.969893, 0.973192,
+    0.973444, 0.975881, 0.979601, 0.981070,
+    0.984989, 0.987520, 0.988830, 0.990983,
+    0.992376, 0.992884, 0.993447, 0.993381,
+    0.993712, 0.994060, 0.993294, 0.992392,
+    0.991338, 0.992410, 0.992051, 0.993874,
+    0.993488, 0.994162, 0.995318, 0.995925,
+    0.997475, 0.999027, 0.998303, 1.001413
+};
+static const double daala_basis_qm_mag_64x64_l[] = {
+    0.992453, 0.984930, 0.985137, 0.985029,
+    0.985514, 0.985784, 0.986269, 0.986854,
+    0.989932, 0.987780, 0.988269, 0.989175,
+    0.989951, 0.990466, 0.991145, 0.991839,
+    0.990773, 0.993191, 0.993618, 0.994221,
+    0.994662, 0.995259, 0.995826, 0.995996,
+    0.999070, 0.996624, 0.996835, 0.996948,
+    0.997022, 0.996973, 0.996993, 0.996996,
+    0.996871, 0.996828, 0.996598, 0.996688,
+    0.996845, 0.996407, 0.996327, 0.996435,
+    0.999173, 0.996216, 0.995981, 0.996173,
+    0.996595, 0.996334, 0.996512, 0.996627,
+    0.994976, 0.997113, 0.997248, 0.997548,
+    0.997943, 0.998121, 0.998291, 0.998687,
+    1.001696, 0.999133, 0.999315, 0.999621,
+    0.999745, 0.999905, 0.999936, 1.000075
+};
+
+/* Basis magnitude compensation tables, chroma 4:2:0 variant.
+ * NOTE(review): currently byte-identical to the _l tables above -- they
+ * could alias the luma arrays to avoid the duplication.  As with the
+ * luma set, the f suffixes are dropped: the arrays are double, and
+ * float-suffixed literals would truncate the constants. */
+static const double daala_basis_qm_mag_4x4_c420[] = {
+    0.870774, 0.872037, 0.949493, 0.947936
+};
+static const double daala_basis_qm_mag_8x8_c420[] = {
+    0.936496, 0.892830, 0.938452, 0.970087,
+    0.974272, 0.967954, 0.974035, 0.990480
+};
+static const double daala_basis_qm_mag_16x16_c420[] = {
+    0.968807, 0.940969, 0.947977, 0.957741,
+    0.969762, 0.978644, 0.984885, 0.988009,
+    0.987424, 0.985569, 0.984215, 0.984462,
+    0.987205, 0.991415, 0.994985, 0.998237
+};
+static const double daala_basis_qm_mag_32x32_c420[] = {
+    0.985068, 0.970006, 0.969893, 0.973192,
+    0.973444, 0.975881, 0.979601, 0.981070,
+    0.984989, 0.987520, 0.988830, 0.990983,
+    0.992376, 0.992884, 0.993447, 0.993381,
+    0.993712, 0.994060, 0.993294, 0.992392,
+    0.991338, 0.992410, 0.992051, 0.993874,
+    0.993488, 0.994162, 0.995318, 0.995925,
+    0.997475, 0.999027, 0.998303, 1.001413
+};
+static const double daala_basis_qm_mag_64x64_c420[] = {
+    0.992453, 0.984930, 0.985137, 0.985029,
+    0.985514, 0.985784, 0.986269, 0.986854,
+    0.989932, 0.987780, 0.988269, 0.989175,
+    0.989951, 0.990466, 0.991145, 0.991839,
+    0.990773, 0.993191, 0.993618, 0.994221,
+    0.994662, 0.995259, 0.995826, 0.995996,
+    0.999070, 0.996624, 0.996835, 0.996948,
+    0.997022, 0.996973, 0.996993, 0.996996,
+    0.996871, 0.996828, 0.996598, 0.996688,
+    0.996845, 0.996407, 0.996327, 0.996435,
+    0.999173, 0.996216, 0.995981, 0.996173,
+    0.996595, 0.996334, 0.996512, 0.996627,
+    0.994976, 0.997113, 0.997248, 0.997548,
+    0.997943, 0.998121, 0.998291, 0.998687,
+    1.001696, 0.999133, 0.999315, 0.999621,
+    0.999745, 0.999905, 0.999936, 1.000075
+};
+
+/* Basis magnitude tables indexed [0 = luma / 4:4:4, 1 = chroma 4:2:0],
+ * then by transform size (4x4 .. 64x64).
+ * NOTE(review): unlike daala_pvq_beta below, the element pointers are not
+ * top-level const; consider "const double *const" here and in daalatab.h. */
+const double *daala_basis_qm_mag[2][DAALA_NBSIZES+1] = {
+    {
+        daala_basis_qm_mag_4x4_l,
+        daala_basis_qm_mag_8x8_l,
+        daala_basis_qm_mag_16x16_l,
+        daala_basis_qm_mag_32x32_l,
+        daala_basis_qm_mag_64x64_l
+    },
+    {
+        daala_basis_qm_mag_4x4_c420,
+        daala_basis_qm_mag_8x8_c420,
+        daala_basis_qm_mag_16x16_c420,
+        daala_basis_qm_mag_32x32_c420,
+        daala_basis_qm_mag_64x64_c420
+    }
+};
+
+/* Per-band PVQ beta values, luma plane, activity masking off: all bands
+ * use 1.0.  (Values are exactly representable, so plain double literals
+ * are used instead of float-suffixed ones.) */
+static const double daala_pvq_beta_4x4_l[] = { 1.0 };
+static const double daala_pvq_beta_8x8_l[] = { 1.0, 1.0, 1.0, 1.0 };
+static const double daala_pvq_beta_16x16_l[] = { 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 };
+static const double daala_pvq_beta_32x32_l[] = { 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 };
+static const double daala_pvq_beta_64x64_l[] = { 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 };
+
+/* Luma with activity masking: 1.5 everywhere except the 4x4 transform. */
+static const double daala_pvq_beta_4x4_am_l[] = { 1.0 };
+static const double daala_pvq_beta_8x8_am_l[] = { 1.5, 1.5, 1.5, 1.5 };
+static const double daala_pvq_beta_16x16_am_l[] = { 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5 };
+static const double daala_pvq_beta_32x32_am_l[] = { 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5 };
+static const double daala_pvq_beta_64x64_am_l[] = { 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5 };
+
+/* Chroma planes (shared by every chroma entry of daala_pvq_beta). */
+static const double daala_pvq_beta_4x4_c[] = { 1.0 };
+static const double daala_pvq_beta_8x8_c[] = { 1.0, 1.0, 1.0, 1.0 };
+static const double daala_pvq_beta_16x16_c[] = { 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 };
+static const double daala_pvq_beta_32x32_c[] = { 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 };
+static const double daala_pvq_beta_64x64_c[] = { 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 };
+
+/* PVQ beta tables indexed [activity masking: 0 = off, 1 = on][plane]
+ * [transform size].  Only the luma (plane 0) tables differ between the
+ * two activity-masking modes; the three chroma entries reuse the same
+ * per-band tables in both. */
+const double *const daala_pvq_beta[2][DAALA_MAX_PLANES][DAALA_NBSIZES + 1] = {
+    {
+        {
+            daala_pvq_beta_4x4_l,
+            daala_pvq_beta_8x8_l,
+            daala_pvq_beta_16x16_l,
+            daala_pvq_beta_32x32_l,
+            daala_pvq_beta_64x64_l
+        },
+        {
+            daala_pvq_beta_4x4_c,
+            daala_pvq_beta_8x8_c,
+            daala_pvq_beta_16x16_c,
+            daala_pvq_beta_32x32_c,
+            daala_pvq_beta_64x64_c
+        },
+        {
+            daala_pvq_beta_4x4_c,
+            daala_pvq_beta_8x8_c,
+            daala_pvq_beta_16x16_c,
+            daala_pvq_beta_32x32_c,
+            daala_pvq_beta_64x64_c
+        },
+        {
+            daala_pvq_beta_4x4_c,
+            daala_pvq_beta_8x8_c,
+            daala_pvq_beta_16x16_c,
+            daala_pvq_beta_32x32_c,
+            daala_pvq_beta_64x64_c
+        }
+    },
+    {
+        {
+            daala_pvq_beta_4x4_am_l,
+            daala_pvq_beta_8x8_am_l,
+            daala_pvq_beta_16x16_am_l,
+            daala_pvq_beta_32x32_am_l,
+            daala_pvq_beta_64x64_am_l
+        },
+        {
+            daala_pvq_beta_4x4_c,
+            daala_pvq_beta_8x8_c,
+            daala_pvq_beta_16x16_c,
+            daala_pvq_beta_32x32_c,
+            daala_pvq_beta_64x64_c
+        },
+        {
+            daala_pvq_beta_4x4_c,
+            daala_pvq_beta_8x8_c,
+            daala_pvq_beta_16x16_c,
+            daala_pvq_beta_32x32_c,
+            daala_pvq_beta_64x64_c
+        },
+        {
+            daala_pvq_beta_4x4_c,
+            daala_pvq_beta_8x8_c,
+            daala_pvq_beta_16x16_c,
+            daala_pvq_beta_32x32_c,
+            daala_pvq_beta_64x64_c
+        }
+    }
+};
diff --git a/libavcodec/daalatab.h b/libavcodec/daalatab.h
new file mode 100644
index 0000000..6a4eb34
--- /dev/null
+++ b/libavcodec/daalatab.h
@@ -0,0 +1,85 @@
+/*
+ * Daala video decoder
+ *
+ * Copyright (C) 2015 Rostislav Pehlivanov <atomnuker at gmail.com>
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#ifndef AVCODEC_DAALATAB_H
+#define AVCODEC_DAALATAB_H
+
+#include "daala.h"
+
+/* Pair of coefficient coordinates used by the zigzag scan tables. */
+typedef uint8_t daala_u8_doublet[2];
+
+/* Band layout of one transform size; instances live in daalatab.c
+ * (daala_layouts). */
+typedef struct DaalaBandLayout {
+    const daala_u8_doublet *const tab; /* zigzag scan table */
+    const int tab_size; /* number of entries in tab */
+    const int *layout_offset; /* sub-layout start offsets */
+    const int layout_offset_size;
+    const int *band_offset; /* band boundary offsets */
+    const int band_offset_size; /* band count (boundary list minus one) */
+} DaalaBandLayout;
+
+/* Properties of a pixel format the decoder supports. */
+struct DaalaPixFmts {
+    enum AVPixelFormat fmt;
+    int planes, depth, depth_mode;
+    int dec[DAALA_MAX_PLANES][2]; /* per-plane pair, presumably (x, y)
+                                   * chroma decimation -- confirm against
+                                   * daaladec.c */
+};
+
+extern const struct DaalaPixFmts daala_valid_formats[];
+extern const int daala_valid_formats_num;
+
+/* Look up the decoder's format descriptor for an AVPixelFormat.
+ * Returns NULL if the format is not supported.
+ * Must be "static inline": a plain C99 "inline" definition in a header
+ * emits no external definition, so any translation unit where the call
+ * is not inlined fails to link. */
+static inline const struct DaalaPixFmts *daala_find_p_format(enum AVPixelFormat fmt)
+{
+    int i;
+    for (i = 0; i < daala_valid_formats_num; i++)
+        if (daala_valid_formats[i].fmt == fmt)
+            return &daala_valid_formats[i];
+    return NULL;
+}
+
+/* Haar "quantization matrix" for each decomposition level */
+extern const int daala_haar_qm[][DAALA_LOG_BSIZE_MAX];
+
+/* Chroma from luma scaling */
+extern const uint16_t daaladsp_cfl_scale[4][4];
+
+/* Quantization matrices, currently only flat (0) and HVS(1) */
+extern const int16_t *const daala_qmatrices[];
+
+/* Haar basis scaling compensation, [0] - x,y; [1] - diag */
+extern const int32_t daala_dc_comp[][2];
+
+/* Maps quantizers */
+extern const int daala_quant_codemap[];
+extern const int daala_quant_codemap_size;
+
+extern const ent_rng daalaent_cdf_tab[];
+
+extern const ent_rng daalaent_cdf_exp_tab[][16];
+extern const ent_rng daalaent_laplace_offset[];
+
+extern const DaalaBandLayout daala_layouts[];
+
+extern const double *daala_basis_qm_mag[2][DAALA_NBSIZES+1];
+
+/* PVQ beta angles for AM */
+extern const double *const daala_pvq_beta[2][DAALA_MAX_PLANES][DAALA_NBSIZES + 1];
+
+#endif /* AVCODEC_DAALATAB_H */
--
2.6.4
More information about the ffmpeg-devel
mailing list