/*
 * FITS image encoder
 * Copyright (c) 2017 Paras Chadha
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * FITS image encoder
 *
 * Specification: https://fits.gsfc.nasa.gov/fits_standard.html Version 3.0
 *
 * RGBA images are encoded as planes in RGBA order. So, NAXIS3 is 3 or 4 for them.
 * Also CTYPE3 = 'RGB ' is added to the header to distinguish them from 3d images.
 */

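/*
 * For illustration only (the sample values below are assumptions, not output
 * of this file): a 640x480 GBRP frame encoded here becomes a data unit that
 * the FITS muxer would describe with header cards roughly like
 *     BITPIX  =  8
 *     NAXIS   =  3
 *     NAXIS1  =  640
 *     NAXIS2  =  480
 *     NAXIS3  =  3
 *     CTYPE3  = 'RGB '
 * The header cards themselves are written by the muxer, not by this encoder,
 * which only emits the padded data unit.
 */
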
#include "libavutil/intreadwrite.h"
 | 
						|
#include "avcodec.h"
 | 
						|
#include "bytestream.h"
 | 
						|
#include "codec_internal.h"
 | 
						|
#include "encode.h"
 | 
						|
 | 
						|
static int fits_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
                             const AVFrame *p, int *got_packet)
{
    uint8_t *bytestream;
    const uint16_t flip = (1 << 15);
    uint64_t data_size = 0, padded_data_size = 0;
    int ret, bitpix, naxis3 = 1, i, j, k, bytes_left;
    int map[] = {2, 0, 1, 3}; // mapping from GBRA -> RGBA, as RGBA is stored in the FITS file

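    /*
     * Pick BITPIX (bits per data value) and NAXIS3 (number of planes) from
     * the input pixel format; grayscale input has a single plane, so map[0]
     * is reset to 0 instead of using the GBRA reordering above.
     */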
    switch (avctx->pix_fmt) {
    case AV_PIX_FMT_GRAY8:
    case AV_PIX_FMT_GRAY16BE:
        map[0] = 0; // grayscale images should be directly mapped
        if (avctx->pix_fmt == AV_PIX_FMT_GRAY8) {
            bitpix = 8;
        } else {
            bitpix = 16;
        }
        break;
    case AV_PIX_FMT_GBRP:
    case AV_PIX_FMT_GBRAP:
        bitpix = 8;
        if (avctx->pix_fmt == AV_PIX_FMT_GBRP) {
            naxis3 = 3;
        } else {
            naxis3 = 4;
        }
        break;
    case AV_PIX_FMT_GBRP16BE:
    case AV_PIX_FMT_GBRAP16BE:
        bitpix = 16;
        if (avctx->pix_fmt == AV_PIX_FMT_GBRP16BE) {
            naxis3 = 3;
        } else {
            naxis3 = 4;
        }
        break;
    default:
        av_log(avctx, AV_LOG_ERROR, "unsupported pixel format\n");
        return AVERROR(EINVAL);
    }

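    /* A FITS data unit is padded with fill to a multiple of 2880 bytes
     * (the FITS logical record size), hence the rounding up below. */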
    data_size = (bitpix >> 3) * avctx->height * avctx->width * naxis3;
    padded_data_size = ((data_size + 2879) / 2880 ) * 2880;

    if ((ret = ff_get_encode_buffer(avctx, pkt, padded_data_size, 0)) < 0)
        return ret;

    bytestream       = pkt->data;

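    /*
     * Write the planes in R, G, B, A order via map[], with rows bottom-up,
     * since by convention the first FITS data row is the bottom of the image.
     * 16-bit samples are stored big-endian with the top bit flipped, i.e.
     * offset by BZERO = 32768 (see the comment in the inner loop).
     */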
    for (k = 0; k < naxis3; k++) {
        for (i = 0; i < avctx->height; i++) {
            const uint8_t *ptr = p->data[map[k]] + (avctx->height - i - 1) * p->linesize[map[k]];
            if (bitpix == 16) {
                for (j = 0; j < avctx->width; j++) {
                    // subtracting bzero is equivalent to first bit flip
                    bytestream_put_be16(&bytestream, AV_RB16(ptr) ^ flip);
                    ptr += 2;
                }
            } else {
                memcpy(bytestream, ptr, avctx->width);
                bytestream += avctx->width;
            }
        }
    }

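    /* Zero-fill the remainder of the last 2880-byte FITS block. */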
    bytes_left = padded_data_size - data_size;
    memset(bytestream, 0, bytes_left);

    pkt->flags |= AV_PKT_FLAG_KEY;
    *got_packet = 1;

    return 0;
}

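/*
 * Encoder registration; the pixel format list below should stay in sync with
 * the switch in fits_encode_frame() above.
 */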
const FFCodec ff_fits_encoder = {
    .p.name         = "fits",
    CODEC_LONG_NAME("Flexible Image Transport System"),
    .p.type         = AVMEDIA_TYPE_VIDEO,
    .p.id           = AV_CODEC_ID_FITS,
    .p.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE,
    FF_CODEC_ENCODE_CB(fits_encode_frame),
    .p.pix_fmts     = (const enum AVPixelFormat[]) { AV_PIX_FMT_GBRAP16BE,
                                                     AV_PIX_FMT_GBRP16BE,
                                                     AV_PIX_FMT_GBRP,
                                                     AV_PIX_FMT_GBRAP,
                                                     AV_PIX_FMT_GRAY16BE,
                                                     AV_PIX_FMT_GRAY8,
                                                     AV_PIX_FMT_NONE },
};