Stega64 encodes and decodes messages in image pixels.
Stega64.encode({ source: HTMLImageElement | HTMLCanvasElement; messages: string[]; encoding: Stega64Encoding; encodeMetadata: boolean; minWidth?: number; minHeight?: number; borderWidth?: number; }): HTMLCanvasElement
const source = await loadImageFromImageUrl({
  url: "./example.jpg"
});
const canvas = Stega64.encode({
  source,
  encoding: "base64",   // illustrative Stega64Encoding value
  encodeMetadata: true,
  messages: ["Hello, world"],
  minWidth: 256,        // illustrative values
  minHeight: 256,
  borderWidth: 1,
  aspectRatio: 1,
});
Stega64.decode({ source: HTMLImageElement | HTMLCanvasElement; encoding: Stega64Encoding; borderWidth?: number; }): string
const result = Stega64.decode({ source });
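Decoding the canvas returned by the encode call above completes the round trip. A minimal sketch, assuming the same illustrative "base64" encoding and border width used in the encode example:
const roundTrip = Stega64.decode({
  source: canvas,
  encoding: "base64", // must match the value passed to Stega64.encode
  borderWidth: 1,
});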
StegaCassette encodes and decodes audio in image pixels.
StegaCassette.encode({ source: HTMLImageElement; audioBuffers: Float32Array[]; sampleRate: number; bitDepth: StegaCassetteBitDepth; encoding: StegaCassetteEncoding; encodeMetadata?: boolean; aspectRatio?: number; borderWidth?: number; }): HTMLCanvasElement
const audioContext = new AudioContext();
const audioBuffers = await loadAudioBuffersFromAudioUrl({
  url: "./example.mp3",
  audioContext,
  channels: 2,                         // illustrative StegaCassetteChannels value (stereo)
  sampleRate: audioContext.sampleRate,
});
const result = StegaCassette.encode({
  source,
  audioBuffers,
  sampleRate: audioContext.sampleRate,
  bitDepth: 16,          // illustrative StegaCassetteBitDepth value
  encoding: "additive",  // placeholder; substitute a valid StegaCassetteEncoding
  encodeMetadata: true,
  aspectRatio: 1,
  borderWidth: 1,
});
StegaCassette.decode({ source: HTMLImageElement | HTMLCanvasElement; bitDepth: StegaCassetteBitDepth; channels: StegaCassetteChannels; encoding: StegaCassetteEncoding; borderWidth?: number; }): Float32Array[]
const metadata = StegaMetadata.decode({ source }) || {};
...
const audioBuffers = StegaCassette.decode({
  source,
  bitDepth: metadata.bitDepth || 16,         // fallback values are illustrative
  channels: metadata.channels || 2,
  encoding: metadata.encoding || "additive", // placeholder StegaCassetteEncoding
});
const audio = await playDecodedAudioBuffers({
  audioBuffers,
  audioContext,
  sampleRate: metadata.sampleRate || audioContext.sampleRate,
});
StegaMetadata can optionally be encoded inside stega images.
StegaMetadata.encode({ source: HTMLCanvasElement; metadata: StegaMetadata }): HTMLCanvasElement
StegaMetadata.decode({ source: HTMLCanvasElement; }): StegaMetadata | null
enum StegaContentType {
  AUDIO = 0,
  STRING = 1,
  ROBUST = 2,
}
type StegaMetadata =
  | StegaMetadataAudio
  | StegaMetadataString
  | StegaMetadataRobust;
interface StegaMetadataAudio {
  type: StegaContentType.AUDIO;
  sampleRate: number;
  bitDepth: StegaCassetteBitDepth;
  channels: StegaCassetteChannels;
  encoding: StegaCassetteEncoding;
  borderWidth: number;
}
interface StegaMetadataString {
  type: StegaContentType.STRING;
  messageCount: number;
  encoding: Stega64Encoding;
  borderWidth: number;
}
interface StegaMetadataRobust {
  type: StegaContentType.ROBUST;
  redundancyLevel: number;
  messageCount: number;
  blockSize: 2 | 4;
  encoding: "hex" | "base16";
}
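A minimal sketch of stamping and reading metadata directly, assuming encodedCanvas is a canvas returned by one of the encoders above; the audio parameter values are illustrative:
const stamped = StegaMetadata.encode({
  source: encodedCanvas,
  metadata: {
    type: StegaContentType.AUDIO,
    sampleRate: 44100,    // illustrative values
    bitDepth: 16,
    channels: 2,
    encoding: "additive", // placeholder StegaCassetteEncoding
    borderWidth: 1,
  },
});
const metadata = StegaMetadata.decode({ source: stamped }); // StegaMetadata | null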
Animate a steganographic image
new StegaAnimator({ resolution: number; source: HTMLImageElement | HTMLCanvasElement; fadeAmount?: number; })
const animator = new StegaAnimator({
  source,
  resolution: 512,  // illustrative values
  fadeAmount: 0.1,
});
document.body.appendChild(animator.canvas);
await animator.animate({
  from: { rotation: Math.PI, scale: 0.0, x: 0.5, y: 0.5 },
  to: { rotation: Math.PI * 4, scale: 0.5, x: 0.5, y: 0.5 },
  rate: 0.005,
});
const killLoop = animator.animationLoop([
  {
    from: { rotation: 0, scale: 0.5, x: 0.5, y: 0.5 },
    to: { rotation: Math.PI * 1, scale: 0.6, x: 0.5, y: 0.5 },
    rate: 0.01, // illustrative rate
  },
  {
    from: { rotation: Math.PI * 1, scale: 0.6, x: 0.5, y: 0.5 },
    to: { rotation: Math.PI * 2, scale: 0.5, x: 0.5, y: 0.5 },
    rate: 0.01,
  },
]);
killLoop();
Load audio buffers from a URL string
async loadAudioBuffersFromAudioUrl({ url: string; audioContext: AudioContext; channels: StegaCassetteChannels; sampleRate?: number; }): Promise<Float32Array[]>
const audioContext = new AudioContext();
const audioBuffers = await loadAudioBuffersFromAudioUrl({
  url: "./example.mp3",
  audioContext,
  channels: 2, // illustrative StegaCassetteChannels value
  sampleRate: audioContext.sampleRate,
});
Load an image from a URL string
async loadImageFromImageUrl({ url: string }): Promise<HTMLImageElement>
const image = await loadImageFromImageUrl({
  url: "./example.jpg"
});
Play decoded audio buffers
async playDecodedAudioBuffers({ audioBuffers: Float32Array[]; audioContext: AudioContext; sampleRate?: number; }): Promise<AudioBufferSourceNode>
const source = await playDecodedAudioBuffers({
  audioBuffers,
  audioContext,
  sampleRate: audioContext.sampleRate,
});
source.stop();
Turn an HTML element into a file drop area
createDropReader({ element: HTMLElement; onSuccess: (element: HTMLImageElement | HTMLAudioElement) => void; onFailure?: (message: string) => void; onDragEnter?: () => void; onDragLeave?: () => void; onDrop?: () => void; types?: (AudioType | ImageType)[]; }): void
const element = document.body;
createDropReader({
  element,
  onSuccess: (image) => element.appendChild(image),
  onFailure: (message) => console.error(message),
  onDragEnter: () => element.classList.add("droppable"),
  onDragLeave: () => element.classList.remove("droppable"),
  onDrop: () => element.classList.remove("droppable"),
  types: ["image/*"],
});
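The readers pair naturally with the decoders. A sketch wiring createDropReader to Stega64.decode, assuming (as in the decode example above) that the dropped image carries embedded metadata:
createDropReader({
  element: document.body,
  onSuccess: (image) => {
    // Decode the dropped image the same way as the Stega64.decode example.
    const message = Stega64.decode({ source: image });
    console.log(message);
  },
  onFailure: (message) => console.error(message),
  types: ["image/*"],
});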
Turn an HTML input into a file input
createFileReader({ element: HTMLInputElement; onSuccess: (element: HTMLImageElement | HTMLAudioElement) => void; onFailure?: (message: string) => void; types?: (AudioType | ImageType)[]; }): void
const element = document.createElement("input");
createFileReader({
  element,
  onSuccess: (image) => document.body.appendChild(image),
  onFailure: (message) => console.error(message),
  types: ["image/*"],
});