Added embed entropy
parent 6c30607e78
commit 7d8cb589ec
9 changed files with 148 additions and 37 deletions
Cargo.lock (generated, 7 changes)

@@ -128,6 +128,12 @@ dependencies = [
  "subtle",
 ]
 
+[[package]]
+name = "entropy"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d68716e45ef572f351be6fad93a7bbf35242b4289a2ff75434032e5d73d74cc2"
+
 [[package]]
 name = "fdeflate"
 version = "0.3.4"

@@ -230,6 +236,7 @@ dependencies = [
  "argon2",
  "bitvec",
  "crc",
+ "entropy",
  "getopts",
  "png",
  "rand",
Cargo.toml

@@ -15,6 +15,7 @@ path = "src/png_data/main.rs"
 argon2 = "0.5.3"
 bitvec = "1.0.1"
 crc = "3.2.1"
+entropy = "0.4.2"
 getopts = "0.2.21"
 png = "0.17.13"
 rand = "0.8.5"
README.md (12 changes)

@@ -4,7 +4,7 @@
 
 ![TeX Live english documentation](doc/texlive_en.png)
 
-`png_data` transforms a file as a png image.
+`png_data` encodes a file into a png image.
 
 ### Encoding
 `png_data -l rgb8 -e file.pdf -o output.png -c "(.pdf) documentation"`

@@ -52,6 +52,10 @@ Where:
 * `output.png` the resulting PNG file
 * `"(.tar) archive"` an optional comment
 
+**Additional Options**
+* `-s|--seed TXT` Sets the random seed for determining the payload blocks. By default the random seed is "WIDTHxHEIGHT" where WIDTH and HEIGHT are the original image's dimensions.
+* `-n|--entropy` Fills unused payload blocks with random data that tries to match the payload's entropy. This feature is experimental and may not fully protect against entropy-based steganography detection. We highly recommend that the payload has maximal entropy, which can be achieved by compressing it.
+
 ### Decoding an image
 `png_embed -l lo2 -d image.png -o embed.tar`
 Where:

@@ -59,12 +63,18 @@ Where:
 * `image.png` the PNG containing an embed
 * `embed.tar` the extracted embedded file
 
+**Additional Options**
+* `-s|--seed TXT` Sets the random seed for determining the payload blocks. By default the random seed is "WIDTHxHEIGHT" where WIDTH and HEIGHT are the original image's dimensions.
+
 ### Getting header information
 `png_embed -l lo2 -z output.png`
 * `lo2` is the `Lo` algorithm using the 2 lowest bits
 * `output.png` a `png_embed` encoded image
 This will display the header of the encoded file, as well as the comment.
 
+**Additional Options**
+* `-s|--seed TXT` Sets the random seed for determining the payload blocks. By default the random seed is "WIDTHxHEIGHT" where WIDTH and HEIGHT are the original image's dimensions.
+
 # License
 
 png_data is licensed under the GNU AGPL version 3 or later. See [LICENSE.md](LICENSE.md) for more information.
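An aside on the `-n|--entropy` option documented above, which recommends compressing the payload first: the reason is easy to see by measuring Shannon entropy directly. The sketch below is illustrative only (it is not part of this commit) and assumes nothing beyond the `entropy` and `rand` crates the project already depends on.

```rust
use entropy::shannon_entropy;
use rand::RngCore;

fn main() {
    // A repetitive, highly compressible payload vs. uniformly random bytes
    // of the same length (random data is roughly what a well-compressed
    // payload looks like to an entropy measure).
    let text = b"aaaaabbbbbcccccdddddeeeee".repeat(40);
    let mut random = vec![0u8; text.len()];
    rand::thread_rng().fill_bytes(&mut random);

    // The repetitive payload sits far below the ~8 bits/byte of random data,
    // which is exactly the gap entropy-based detection looks for.
    println!("repetitive payload: {:.2} bits/byte", shannon_entropy(&text));
    println!("random payload:     {:.2} bits/byte", shannon_entropy(&random));
}
```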
@@ -7,7 +7,7 @@ PNG_EMBED=../target/debug/png_embed
 echo "Encoding..."
 for i in {1..7}; do
 	echo "Writing dec-lo${i}.."
-	$PNG_EMBED -l lo${i} -e embed.png input.png -o out-lo${i}.png
+	$PNG_EMBED -l lo${i} -n -e embed.png input.png -o out-lo${i}.png
 done
 
 echo "Decoding..."
@@ -10,7 +10,8 @@ pub trait Decode {
 
 	/// Decode the data from an iterator
 	fn decode<I>(it: &mut I) -> Result<Self::Type, String>
-	where I: Iterator<Item = (usize, u8)>;
+	where
+		I: Iterator<Item = (usize, u8)>;
 }
 
 /// The program's version.

@@ -69,7 +70,7 @@ impl Header {
 }
 
 impl Encode for Header {
-	fn encode(&self, vec: &mut Vec<u8>) {
+	fn encode(&self, vec: &mut Vec<u8>) {
 		// Version
 		vec.extend_from_slice((self.version as u16).to_le_bytes().as_slice());
 

@@ -87,14 +88,16 @@ impl Encode for Header {
 		if let Some(comment) = &self.comment {
 			vec.extend_from_slice(comment.as_bytes());
 		}
-	}
+	}
 }
 
 impl Decode for Header {
-	type Type = Header;
+	type Type = Header;
 
-	fn decode<I>(it: &mut I) -> Result<Self::Type, String>
-	where I: Iterator<Item = (usize, u8)> {
+	fn decode<I>(it: &mut I) -> Result<Self::Type, String>
+	where
+		I: Iterator<Item = (usize, u8)>,
+	{
 		let mut count = 0;
 		let mut next = || -> Result<u8, String> {
 			let result = it

@@ -117,7 +120,7 @@ impl Decode for Header {
 
 			Some(
 				String::from_utf8(comment_data)
-					.map_err(|e| format!("Failed to retrieve comment: {e}"))?,
+					.map_err(|e| format!("Failed to retrieve comment: {e}"))?,
 			)
 		} else {
 			None

@@ -129,7 +132,7 @@ impl Decode for Header {
 			data_crc,
 			comment,
 		})
-	}
+	}
 }
 
 /*
@@ -45,7 +45,7 @@ fn str_to_layout(layout: &str) -> Result<(ColorType, BitDepth), String> {
 	let split = layout
 		.char_indices()
 		.find(|(_, c)| c.is_ascii_digit())
-		.ok_or(format!("Unable to find number for layout's bit depth"))?
+		.ok_or("Unable to find number for layout's bit depth".to_string())?
 		.0;
 	match layout.split_at(split) {
 		("rgb", bits) => match bits {

@@ -99,8 +99,7 @@ fn best_layout(size: u64, bits_per_pixel: u8) -> (u32, u32) {
 }
 
 /// Gets the minimum image buffer size in bytes
-fn minimum_size(color: ColorType, depth: BitDepth, width: u32, height: u32) -> usize
-{
+fn minimum_size(color: ColorType, depth: BitDepth, width: u32, height: u32) -> usize {
 	let samples = width as usize * color.samples();
 	(match depth {
 		BitDepth::Sixteen => samples * 2,

@@ -137,15 +136,12 @@ fn encode(input: String, output: String, layout: String, matches: Matches) -> Re
 	eprintln!("==============");
 
 	let bits_per_pixel = bits_per_pixel(layout.0, layout.1);
-	let (width, height) = best_layout(
-		(data.len() + input_data.len()) as u64,
-		bits_per_pixel
-	);
+	let (width, height) = best_layout((data.len() + input_data.len()) as u64, bits_per_pixel);
 
 	// Encode
 	let output_file = File::create(&output)
 		.map_err(|err| format!("Failed to open output file `{output}`: {err}"))?;
-	let ref mut w = BufWriter::new(output_file);
+	let w = &mut BufWriter::new(output_file);
 	let mut encoder = png::Encoder::new(w, width, height);
 	encoder.set_color(layout.0);
 	encoder.set_depth(layout.1);

@@ -189,9 +185,9 @@ fn decode_header(input: String, _matches: Matches) -> Result<(), String> {
 
 	data.resize(info.buffer_size(), 0);
 
-
 	let mut it = data.iter().enumerate().map(|(idx, byte)| (idx, *byte));
-	let header = Header::decode(&mut it).map_err(|err| format!("Failed to decode header: {err}"))?;
+	let header =
+		Header::decode(&mut it).map_err(|err| format!("Failed to decode header: {err}"))?;
 	eprintln!("=== HEADER ===");
 	eprintln!("Version: {:#?}", header.version);
 	eprintln!(

@@ -219,10 +215,8 @@ fn decode(input: String, output: String, _matches: Matches) -> Result<(), String
 
 	data.resize(info.buffer_size(), 0);
 
-
 	let mut it = data.iter().enumerate().map(|(idx, byte)| (idx, *byte));
-	let header =
-	{
+	let header = {
 		//let mut temp_it = std::mem::take(&mut it);
 		Header::decode(&mut it).map_err(|err| format!("Failed to decode header: {err}"))?
 	};

@@ -236,14 +230,18 @@ fn decode(input: String, output: String, _matches: Matches) -> Result<(), String
 	eprintln!("==============");
 
 	// Check crc
-	let data_start = it.next().ok_or(format!("Failed to get data start"))?.0;
-	let file_data = &data[data_start..data_start+header.data_len as usize];
+	let data_start = it.next().ok_or("Failed to get data start".to_string())?.0;
+	let file_data = &data[data_start..data_start + header.data_len as usize];
 	let crc = Crc::<u32>::new(&crc::CRC_32_CKSUM).checksum(file_data);
 	if crc != header.data_crc {
-		Err(format!("Data CRC[{crc:X}] does not match header CRC[{:X}]", header.data_crc))?;
+		Err(format!(
+			"Data CRC[{crc:X}] does not match header CRC[{:X}]",
+			header.data_crc
+		))?;
 	}
 
-	std::fs::write(&output, file_data).map_err(|err| format!("Failed to write to output file `{output}`: {err}"))?;
+	std::fs::write(&output, file_data)
+		.map_err(|err| format!("Failed to write to output file `{output}`: {err}"))?;
 	println!("File written to `{output}`");
 
 	Ok(())
@@ -4,6 +4,7 @@ use rand::prelude::SliceRandom;
 use rand::Rng;
 
 use crate::embed::EmbedAlgorithm;
+use crate::ent::EntropyGenerator;
 
 /// Gets the best blocksize (i.e. that minimize remaining space) for a certain data length.
 /// The blocksize is a number in range [16, 65536]
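The body of `best_blocksize` is not shown in this diff, so the following is a purely hypothetical sketch of a function matching the doc comment above (scan [16, 65536] for the size that leaves the least unused space in the last block); the repository's actual implementation may well differ.

```rust
// Hypothetical sketch only, not code from the repository.
// "Waste" is the unused space left in the last, partially filled block.
fn best_blocksize_sketch(data_len: usize) -> usize {
    (16usize..=65536)
        .min_by_key(|&bs| (bs - data_len % bs) % bs)
        .unwrap()
}

fn main() {
    // 20 is the first size in range that divides 1000 exactly (zero waste).
    assert_eq!(best_blocksize_sketch(1000), 20);
}
```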
@@ -27,6 +28,7 @@ pub struct BlockPlacement<'a> {
 	algorithm: &'a EmbedAlgorithm,
 	data: &'a mut [u8],
 	block_size: usize,
+	pub used_blocks: usize,
 	pub blocks: Vec<usize>,
 }
 

@@ -68,14 +70,16 @@ impl<'a> BlockPlacement<'a> {
 		blocks.shuffle(rng);
 
 		// Only keep the first blocks_num blocks
-		blocks.resize(blocks_num, 0);
+		//blocks.resize(blocks_num, 0);
 
-		Ok(Self {
+		let s = Self {
 			algorithm,
 			data,
 			block_size,
+			used_blocks: blocks_num,
 			blocks,
-		})
+		};
+		Ok(s)
 	}
 
 	// Embeds the data into the original image

@@ -93,7 +97,7 @@ impl<'a> BlockPlacement<'a> {
 		let mut index = 0;
 		match self.algorithm {
 			EmbedAlgorithm::Lo(bits) => {
-				for block in &self.blocks {
+				for block in &self.blocks[0..self.used_blocks] {
 					for i in 0..self.block_size {
 						let pos = block * self.block_size + i;
 						let hi = std::cmp::min(*bits as usize, embed.len() - index);

@@ -107,6 +111,26 @@ impl<'a> BlockPlacement<'a> {
 			}
 		}
 	}
+
+	/// Fills unused blocks with randomly generated data
+	pub fn fill_unused<R>(&mut self, mut gen: EntropyGenerator<R>)
+	where
+		R: Rng,
+	{
+		match self.algorithm {
+			EmbedAlgorithm::Lo(bits) => {
+				let mask: u8 = (1 << bits) - 1;
+				for block in &self.blocks[self.used_blocks..] {
+					for i in 0..self.block_size {
+						let pos = block * self.block_size + i;
+
+						self.data[pos] &= !mask;
+						self.data[pos] |= gen.next() & mask;
+					}
+				}
+			}
+		}
+	}
 }
 
 // Iterator over blocks in the resulting image
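The byte-level write in `fill_unused` is the same low-bit masking that the `Lo(bits)` algorithm applies to the payload itself: keep the high bits of each cover byte and replace only the `bits` least-significant ones. A minimal standalone illustration of that masking (not code from the repository):

```rust
// Clear the low `bits` bits of the cover byte, then OR in the low bits of
// the payload (or random filler) byte; the high bits stay untouched.
fn write_low_bits(cover: u8, filler: u8, bits: u8) -> u8 {
    let mask: u8 = (1 << bits) - 1;
    (cover & !mask) | (filler & mask)
}

fn main() {
    // With lo2, only the two least-significant bits of the cover byte change.
    assert_eq!(write_low_bits(0b1010_1100, 0b1111_1111, 2), 0b1010_1111);
    assert_eq!(write_low_bits(0b1010_1100, 0b0000_0000, 2), 0b1010_1100);
}
```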
src/png_embed/ent.rs (new file, 53 lines)

@@ -0,0 +1,53 @@
+use rand::distributions::WeightedIndex;
+use rand::prelude::Distribution;
+use rand::Rng;
+
+pub struct EntropyGenerator<R>
+where
+	R: Rng,
+{
+	rng: R,
+	dist: WeightedIndex<f64>,
+}
+
+/// Generates random bytes with a set entropy
+impl<R: Rng> EntropyGenerator<R> {
+	// FIXME: Bad entropy
+	pub fn new(entropy: f64, rng: R) -> Self {
+		// FIXME: Does not work for entropy below 1.0
+		let n = (2.0f64.powf(entropy)).round() as usize;
+
+		let mut probabilities = std::iter::repeat(1.0f64).take(n).collect::<Vec<_>>();
+		let sum = probabilities.iter().sum::<f64>();
+		probabilities.iter_mut().for_each(|p| *p /= sum);
+
+		let dist = WeightedIndex::new(&probabilities).unwrap();
+
+		Self { rng, dist }
+	}
+
+	pub fn next(&mut self) -> u8 { self.dist.sample(&mut self.rng) as u8 }
+}
+
+#[cfg(test)]
+mod tests {
+	use entropy::shannon_entropy;
+	use rand::SeedableRng;
+	use rand_chacha::ChaCha8Rng;
+
+	use super::*;
+
+	#[test]
+	fn test_entropy() {
+		for i in 1..8 {
+			let mut gen = EntropyGenerator::new(i as f64, ChaCha8Rng::from_entropy());
+
+			let mut data = Vec::with_capacity(1024);
+			for _ in 0..1024 {
+				data.push(gen.next());
+			}
+
+			assert!((shannon_entropy(data) - i as f32).abs() < 0.2);
+		}
+	}
+}
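For intuition on the two FIXMEs in `EntropyGenerator::new`: drawing uniformly from `n` equally weighted symbols yields a Shannon entropy of log2(n) bits, so approximating a target with `n = round(2^target)` is only exact when the target is log2 of an integer, and it degenerates below 1.0, where `n` rounds to 1 or 2 and the output entropy becomes 0 or 1 bit. A standalone check of that relationship, independent of the crate code:

```rust
// Standalone sanity check, not code from the repository: with `n` equally
// weighted symbols, the achievable Shannon entropy is exactly log2(n),
// which is what EntropyGenerator::new approximates via n = round(2^target).
fn achievable_entropy(target: f64) -> f64 {
    let n = 2.0f64.powf(target).round().max(1.0);
    n.log2()
}

fn main() {
    for target in [0.5, 1.0, 2.5, 3.0, 7.0] {
        println!(
            "target {target:.1} bits -> {:.3} bits achievable ({} symbols)",
            achievable_entropy(target),
            2.0f64.powf(target).round() as usize,
        );
    }
}
```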
@@ -1,7 +1,8 @@
-pub mod block;
-pub mod embed;
-pub mod header;
-pub mod image;
+mod block;
+mod embed;
+mod ent;
+mod header;
+mod image;
 
 use std::env;
 use std::fs::File;

@@ -16,6 +17,7 @@ use block::BlockPlacement;
 use block::BlockPlacementIterator;
 use crc::Crc;
 use embed::EmbedAlgorithm;
+use ent::EntropyGenerator;
 use getopts::Matches;
 use getopts::Options;
 use header::Decode;

@@ -152,6 +154,14 @@ fn encode(
 	eprintln!("==============");
 
 	placement.write_embed(embed_data.as_slice().view_bits::<Lsb0>());
+	if matches.opt_present("n") {
+		let ent = entropy::shannon_entropy(&embed_data);
+		println!("Payload entropy: {ent}\nFilling image remainder with random data...");
+		placement.fill_unused(EntropyGenerator::new(
+			ent as f64,
+			ChaCha8Rng::from_entropy(),
+		))
+	}
 
 	let outfile = File::create(&output).unwrap();
 	let w = &mut BufWriter::new(Box::new(outfile) as Box<dyn Write>);

@@ -256,6 +266,11 @@ fn main() -> ExitCode {
 	);
 	opts.optflag("z", "info", "Read header");
 	opts.optopt("l", "algorithm", "Embed algorithm", "lo3");
+	opts.optflag(
+		"n",
+		"entropy",
+		"Attempts to hide payload by modifying the file's entropy",
+	);
 	opts.optflag("h", "help", "Print this help menu");
 	opts.optflag("v", "version", "Print program version and licenses");
 