View Javadoc

1   package de.desy.video.sw;
2   
3   //big memory for testing:
4   //-Xms512m -Xmx512m -XX:PermSize=128m -XX:MaxPermSize=128m
5   
6   import java.awt.AlphaComposite;
7   import java.awt.BorderLayout;
8   import java.awt.Canvas;
9   import java.awt.Color;
10  import java.awt.Composite;
11  import java.awt.Container;
12  import java.awt.Dimension;
13  import java.awt.Font;
14  import java.awt.FontMetrics;
15  import java.awt.Graphics;
16  import java.awt.Graphics2D;
17  import java.awt.HeadlessException;
18  import java.awt.Image;
19  import java.awt.MediaTracker;
20  import java.awt.Point;
21  import java.awt.Toolkit;
22  import java.awt.event.ComponentAdapter;
23  import java.awt.event.ComponentEvent;
24  import java.awt.event.MouseAdapter;
25  import java.awt.event.MouseEvent;
26  import java.awt.event.MouseListener;
27  import java.awt.event.MouseMotionAdapter;
28  import java.awt.event.MouseMotionListener;
29  import java.awt.event.MouseWheelListener;
30  import java.awt.image.BufferedImage;
31  import java.awt.image.MemoryImageSource;
32  import java.text.DateFormat;
33  import java.text.SimpleDateFormat;
34  
35  import javax.swing.SwingUtilities;
36  
37  import de.desy.tine.types.IMAGE;
38  import de.desy.tine.types.IMAGE.FrameHeader;
39  
40  /**
41   * <code>ImageDisplayer</code> implements basic, but versatile display of
42   * video images. Five guidelines were set at development startup:
43   * <ul>
44   * <li>pure Java-code, no Java native interface (JNI, which might improve speed
45   * at cost of compatibility)
46   * <li>fast (above average performance) of Java live image display
47   * <li>providing of additional "image enhancement" methods like false colour
48   * mode(s), normalisation
49   * <li>support of JPEG, RGB raw, Grayscale and HuffYUV-compressed grayscale as
50   * input data
51   * <li>robustness (malformed data should do no harm and should be recognizable)
52   * <li>easeness, intuitiveness of use (keep aspect ratio, overlaying of image
53   * metadata, obtain pixel value under cursor, save still image to PNG file,
54   * facility-overridable splash and error screens)
55   * </ul>
56   * The component was designed that it can either be used inside a so-called ACOP
57   * Video Bean as well as inside a standalone application.
58   * 
59   * @author <a href="mailto:stefan.weisse@desy.de">Stefan Weisse</a>
60   * @version $Id: Templates.xml,v 1.10 2008/06/23 14:30:13 sweisse Exp $
61   * 
62   */
63  
64  public final class ImageDisplayer extends Container {
65  
66  	/** enum for color mapping assignment */
67  	public enum ColorMap {
68  		GRAYSCALE, PITZ, JET;
69  
70  		@Override
71  		public String toString() {
72  			switch (this) {
73  			case GRAYSCALE:
74  				return "Grayscale";
75  			case PITZ:
76  				return "PITZ False Color";
77  			case JET:
78  				return "JET False Color";
79  			}
80  			return super.toString();
81  		}
82  	}
83  
84  	/**
85  	 * Enum for overlay information
86  	 * 
87  	 * @author mdavid
88  	 */
89  	public enum OverlayState {
90  		/**
91  		 * overlay information always enabled.
92  		 */
93  		ON("On"),
94  
95  		/**
96  		 * overlay information always disabled.
97  		 */
98  		OFF("Off"),
99  
100 		/**
101 		 * overlay information appears when moving over video frame.
102 		 */
103 		AUTO("Auto");
104 
105 		private final String description;
106 
107 		OverlayState(String description) {
108 			this.description = description;
109 		}
110 
111 		public String getDescription() {
112 			return description;
113 		}
114 	}
115 
116 	private static final long serialVersionUID = 310376L;
117 
118 	/**
119 	 * minimal width of video canvas. This is not taken into account inside the
120 	 * ACOP bean.
121 	 */
122 	private static final int MIN_X = 592;
123 
124 	/**
125 	 * minimal height of video canvas. This is not taken into account inside the
126 	 * ACOP bean.
127 	 */
128 	private static final int MIN_Y = 211; // 127 + 2x42 OSD;
129 
130 	/** Timeout period. See 'reenableAlwaysUpdateVideoCanvas' */
131 	private static final long TIMEOUT = 1000;
132 
133 	/** Flipping Buffer strategy for (fast) Java drawing */
134 	private java.awt.image.BufferStrategy flipStrategy;
135 
136 	/** is Flipping Buffer strategy for (fast) Java drawing already created? */
137 	private boolean isBufferStrategyCreated;
138 
139 	/** underlying drawing canvas */
140 	private Canvas videoCanvas;
141 
142 	/** dedicated synchronization (thread lock) object */
143 	private Object lockHeaderUpdate = new Object();
144 
145 	/** is the mouse arrow inside the overlay section of the video canvas? */
146 	private boolean isVideoCanvasMouseInOverlay;
147 
148 	/** is the mouse arrow inside the video canvas? */
149 	private boolean isVideoCanvasMouseInCanvas;
150 
151 	/** is the mouse arrow inside the video canvas and is the left button down? */
152 	private boolean isVideoCanvasMouseLeftButtonDown;
153 
154 	/**
155 	 * set to true if live transfer is taking place and is not timed out. Set to
156 	 * false if either live transfer is running but timed out or not running
157 	 * (stop mode) at all.
158 	 */
159 	private boolean isLiveTransfer;
160 
161 	/**
162 	 * see description of OverlayState enumarion
163 	 * 
164 	 * @author mdavid
165 	 */
166 	private OverlayState overlayState;
167 
168 	/**
169 	 * zooming out of area of interest (zoom area of interest to full rectangle
170 	 * of component, but still do take account of aspect ratio if it is on)
171 	 */
172 	private boolean isAOIZoom;
173 
174 	/**
175 	 * set to true if aspect ratio should be kept. At the moment, a pixel is
176 	 * known to be square so aspect ratio takes into account the ratio of width
177 	 * vs. height. This might change in future because VSv3 allows to detect and
178 	 * take into account non- squared pixels.
179 	 */
180 	private boolean isKeepAspectRatioEnabled;
181 
182 	/**
183 	 * set to true if histogram equalisation (aka. normalisation, aka. contrast
184 	 * enhancement) should be applied to any image before displayed. Makes sense
185 	 * for dark scenes, very bright scenes or more in general any scene with low
186 	 * contrast. Colour histogram equalisation is numerically well but is
187 	 * outperformed by professional algorithms. This might be improved in
188 	 * future. Same but weaker effect is known for luminosity (greyscale)
189 	 * normalisation.
190 	 */
191 	private boolean isHistogramEqualisation; // was: Normalisation
192 
193 	/**
194 	 * false-colour switching can be disabled because if colour video is
195 	 * displayed there is no meaningful false color translation so switching is
196 	 * meaningless.
197 	 */
198 	private boolean falseColorSwitchingAllowed = true;
199 
200 	/** currently used color map, default is grayscale */
201 	private ColorMap colorMap = ColorMap.GRAYSCALE;
202 
203 	/** matching colour mode for colour map */
204 	private int colorMode = CColorLookupTable.GRAYSCALE;
205 
206 	/** has the false colour table to be updated before next image is shown? */
207 	private boolean isDoUpdateColorLookupTable = true;
208 
209 	/**
210 	 * memory buffer for TYPE_INT_ARGB data before rendered to java.awt.Image
211 	 * and displayed
212 	 */
213 	private int[] displayImageBuffer; // = new int[hdr.width*hdr.height];
214 
215 	/**
216 	 * memory buffer for current false-colour index to TYPE_INT_ARGB conversion
217 	 * table
218 	 */
219 	private int[] colorLookupTableArray; // mapping from
220 
221 	/** class instance for 'colorLookupTableArray' creation and updating */
222 	private CColorLookupTable colorLookupTable;
223 
224 	/**
225 	 * instance that will buffer the special error screen about to be displayed
226 	 * if video image data could not properly be decoded or displayed
227 	 */
228 	private IMAGE errorScreen;
229 
230 	/**
231 	 * last received frame number. This value is updated if a video image is
232 	 * scheduled for displaying. It is reset to -1 on startup of live transfer.
233 	 */
234 	private long lastFrameNumber = -1;
235 
236 	/**
237 	 * total dropped frames counter. This value is updated if a video image is
238 	 * scheduled for displaying. It is reset to zero on startup of live
239 	 * transfer.
240 	 */
241 	private long droppedFrames = 0;
242 
243 	/**
244 	 * total subsequent received frames counter. This value is updated if a
245 	 * video image is scheduled for displaying. It is reset to zero on startup
246 	 * of live transfer.
247 	 */
248 	private long receivedFrames = 0;
249 
250 	/**
251 	 * ratio (percentage) of dropped vs. total number of received frames. This
252 	 * value is updated if a video images is scheduled for displaying. It is
253 	 * reset to "-0.001" (about zero, identified as unset) on startup of live
254 	 * transfer.
255 	 */
256 	private double ratio = -0.001;
257 
258 	/**
259 	 * java.awt.Image instance buffer for rendered video image contents ready
260 	 * for drawing
261 	 */
262 	private Image image; // @jve:decl-index=0:
263 
264 	/**
265 	 * java.awt.MemoryImageSource marshals 'displayImageBuffer' for faster
266 	 * updating of 'image'
267 	 */
268 	private MemoryImageSource memoryImageSource; // @jve:decl-index=0:
269 
270 	/**
271 	 * extracted but frozen video header out of current "about-to-be" displayed
272 	 * frame. Values will not changed if header data is preprocessed (and thus,
273 	 * changed) during image prepare inside this class.
274 	 */
275 	private CVideoHeader3 constHeader;
276 
277 	/**
278 	 * extracted but volatile video header out of current "about-to-be"
279 	 * displayed frame. Values are altered if header data is preprocessed and
280 	 * changed during image prepare inside this class.
281 	 */
282 	private CVideoHeader3 changeHeader;
283 
284 	/**
285 	 * Timeout counter for setting up the GUI for any redrawing (imperformant)
286 	 * if for 'TIMEOUT' milliseconds no new video image was received.
287 	 */
288 	private long reenableAlwaysUpdateVideoCanvas = java.lang.System
289 			.currentTimeMillis();
290 
291 	/**
292 	 * default constructor. Initialises parent class and calls initialize()
293 	 * method for local initialisation.
294 	 */
295 	public ImageDisplayer() {
296 		super();
297 		initialize();
298 	}
299 
300 	@Override
301 	/** overridden general mouse listener. */
302 	public synchronized void addMouseListener(MouseListener l) {
303 		super.addMouseListener(l);
304 		videoCanvas.addMouseListener(l);
305 	}
306 
307 	@Override
308 	/** overridden mouse motion listener. */
309 	public synchronized void addMouseMotionListener(MouseMotionListener l) {
310 		super.addMouseMotionListener(l);
311 		videoCanvas.addMouseMotionListener(l);
312 	}
313 
314 	@Override
315 	/** overridden mouse wheel listener. */
316 	public synchronized void addMouseWheelListener(MouseWheelListener l) {
317 		super.addMouseWheelListener(l);
318 		videoCanvas.addMouseWheelListener(l);
319 	}
320 
321 	/**
322 	 * updates the currently used false colour map. Get/Set. This method will do
323 	 * nothing if the new map equals the current map for performance reasons.
324 	 * After new assignment, drawing is enforced to update all involved parts of
325 	 * code that work with or reference false colours.
326 	 * 
327 	 * @param map
328 	 *            new false colour map to use
329 	 */
330 	public void setColorMap(ColorMap map) {
331 		if (this.colorMap == map)
332 			return;
333 
334 		ColorMap oldMap = this.colorMap;
335 		this.colorMap = map;
336 
337 		// mdavid: optimized
338 		switch (colorMap) {
339 		case GRAYSCALE:
340 			colorMode = CColorLookupTable.GRAYSCALE;
341 			break;
342 
343 		case PITZ:
344 			colorMode = CColorLookupTable.FALSEPITZ;
345 			break;
346 
347 		case JET:
348 			colorMode = CColorLookupTable.FALSEJET;
349 			break;
350 
351 		default:
352 			throw new IllegalArgumentException("Unsupported color map: " + map);
353 		}
354 
355 		isDoUpdateColorLookupTable = true;
356 		firePropertyChange("colorMap", oldMap, this.colorMap);
357 
358 		drawAndUpdate(false);
359 		repaintAnything();
360 	}
361 
362 	/**
363 	 * returns the currently used false colour map to outside world. Get/Set.
364 	 * 
365 	 * @return currently used false colour map
366 	 */
367 	public ColorMap getColorMap() {
368 		return colorMap;
369 	}
370 
371 	/**
372 	 * @author mdavid
373 	 * @param overlay
374 	 *            set overlay mode always on (true) or off(false)
375 	 */
376 	public void setOverlayState(OverlayState state) {
377 		OverlayState oldState = overlayState;
378 		overlayState = state;
379 		firePropertyChange("overlayState", oldState, overlayState);
380 		repaintAnything(); // TODO: mdavid - why here ??
381 	}
382 
383 	/**
384 	 * Returns overlay information state.
385 	 * 
386 	 * @author mdavid
387 	 */
388 	public OverlayState getOverlayState() {
389 		return overlayState;
390 	}
391 
392 	/**
393 	 * Switches 'histogram equalisation' on or off. Get/Set. This method will do
394 	 * nothing if the histogram equalisation mode is already in the state about
395 	 * to be set. After new assignment, drawing as well as repainting is
396 	 * enforced to update all involved parts of code so that histogram
397 	 * equalisation is immediately applied to image.
398 	 * 
399 	 * @param switch
400 	 *            histogram equalisation on (true) or off (false)
401 	 */
402 	public void setHistogramEqualisationEnabled(boolean normalisation) {
403 		if (this.isHistogramEqualisation == normalisation)
404 			return;
405 		this.isHistogramEqualisation = normalisation;
406 		firePropertyChange("histogramEqualisation", !isHistogramEqualisation,
407 				isHistogramEqualisation);
408 
409 		drawAndUpdate(false);
410 		repaintAnything();
411 	}
412 
413 	/**
414 	 * returns whether histogram equalisation (normalisation) is currently
415 	 * enabled. Get/Set.
416 	 * 
417 	 * @return true - histogram equalisation is switched on<br>
418 	 *         false - histogram equalisation is switched off
419 	 */
420 	public boolean isHistogramEqualisationEnabled() {
421 		return isHistogramEqualisation;
422 	}
423 
424 	/**
425 	 * Switches 'AOI zoom' on or off. Get/Set. This method will do nothing if
426 	 * the AOI zoom mode is already in the state about to be set. Repainting is
427 	 * enforced to have an immediate update.
428 	 * 
429 	 * @param zoom
430 	 *            enable (true) or disable (false) AOI zooming
431 	 */
432 	public void setAOIZoom(boolean zoom) {
433 		if (isAOIZoom == zoom)
434 			return;
435 		this.isAOIZoom = zoom;
436 
437 		firePropertyChange("AOIZoom", !isAOIZoom, isAOIZoom);
438 		repaintAnything();
439 	}
440 
441 	/**
442 	 * returns whether outzooming of AOI (no black border around AOI which
443 	 * depicts full frame) is currently enabled. Get/Set.
444 	 * 
445 	 * @return true - AOI Zoom is switched on<br>
446 	 *         false - AOI Zoom is switched off
447 	 */
448 	public boolean isAOIZoom() {
449 		return isAOIZoom;
450 	}
451 
452 	/**
453 	 * Switches 'aspect ratio is kept' on or off. Get/Set. This method will do
454 	 * nothing if the aspect ratio mode is already in the state about to be set.
455 	 * Repainting is enforced to have an immediate update.
456 	 * 
457 	 * @param keep
458 	 *            enable (true) or disable (false) keeping of aspect ratio
459 	 */
460 	public void setKeepAspectRatioEnabled(boolean keep) {
461 		if (this.isKeepAspectRatioEnabled == keep)
462 			return;
463 		this.isKeepAspectRatioEnabled = keep;
464 
465 		firePropertyChange("keepAspectRatioEnabled", !isKeepAspectRatioEnabled,
466 				isKeepAspectRatioEnabled);
467 		repaintAnything();
468 	}
469 
470 	/**
471 	 * returns whether 'aspect ratio is kept' is currently enabled. Get/Set.
472 	 * 
473 	 * @return true - 'aspect ratio is kept' is switched on<br>
474 	 *         false - 'aspect ratio is kept' is switched off
475 	 */
476 	public boolean isKeepAspectRatioEnabled() {
477 		return isKeepAspectRatioEnabled;
478 	}
479 
480 	/**
481 	 * saves the currently displayed image as colour (RGB) png file. An
482 	 * additional text file is saved beside the png file that describes full
483 	 * image metainformation (VSv3 header contents).
484 	 * 
485 	 * @param fileNamePath
486 	 *            Filename of PNG file with extension, may include path
487 	 * @return boolean indicating success (true) or failure (false)
488 	 */
489 	public boolean saveAsPNGImage(String fileNamePath) {
490 		boolean ret;
491 
492 		repaintAnything();
493 
494 		synchronized (lockHeaderUpdate) {
495 			ret = CBasicPNGWriterV3.write(fileNamePath, image, changeHeader);
496 		}
497 
498 		repaintAnything();
499 
500 		return ret;
501 	}
502 
503 	/**
504 	 * Switches 'false colour switching meaningfulness' on or off. Get/Set. This
505 	 * method will do nothing if the meaningfulness of false colour switching is
506 	 * already in the state about to be set. Repainting is enforced to have an
507 	 * immediate update.
508 	 * 
509 	 * The variable behind this get/set was invented to have an indication
510 	 * whether false colour switching is meaningful (false colour data) or not
511 	 * meaningful (colour data). For colour data no proper transformation to
512 	 * false colour is provided (usually it is not necessary simply) so false
513 	 * colour switching would be logically meaningless.
514 	 * 
515 	 * @param enabled
516 	 *            <ul>
517 	 *            <li>true false colour switching is meaningful (allowed)
518 	 *            <li>false switching of false colour is not meaningful
519 	 *            (allowed)
520 	 *            </ul>
521 	 */
522 	public void setFalseColorSwitchingAllowed(boolean enabled) {
523 		if (this.falseColorSwitchingAllowed == enabled)
524 			return;
525 		this.falseColorSwitchingAllowed = enabled;
526 
527 		firePropertyChange("falseColorSwitchingAllowed",
528 				!falseColorSwitchingAllowed, falseColorSwitchingAllowed);
529 		repaintAnything();
530 	}
531 
532 	/**
533 	 * returns whether false colour switching is currently meaningful. Get/Set.
534 	 * 
535 	 * @return true - false colour switching is meaningful (allowed)<br>
536 	 *         false - false colour switching is not meaningful (allowed)
537 	 */
538 	public boolean isFalseColorSwitchingAllowed() {
539 		return falseColorSwitchingAllowed;
540 	}
541 
542 	/**
543 	 * informs the displayer that a lot of frames are constantly to be
544 	 * drawn(true) or it is not very likely that a lot of frames are drawn
545 	 * (timeout on transfer or stop mode).
546 	 * 
547 	 * The method was invented for performance reasons. When a lot of frames are
548 	 * coming in each second it can be a very time-consuming process for Java.
549 	 * In order to offload some performance, certain Java-scheduled redrawing of
550 	 * the component is disabled because it is redrawn with each new video image
551 	 * coming in anyhow.
552 	 * 
553 	 * @param aLive
554 	 *            <ul>
555 	 *            <li>true: live mode is set, a lot of frames are expected
556 	 *            <li>false: either live mode was stopped or timeout happened
557 	 *            </ul>
558 	 */
559 	public void setLiveTransfer(boolean aLive) {
560 		isLiveTransfer = aLive;
561 	}
562 
563 	/**
564 	 * resets important internal parameters. This method must be called from
565 	 * outside (e.g. image provider like TineHandler) in order to prepare the
566 	 * displayer for a new chunk of subsequent images.
567 	 * 
568 	 */
569 	public void resetForReceiving() {
570 		image = null;
571 		memoryImageSource = null;
572 		displayImageBuffer = null;
573 		colorLookupTableArray = null;
574 		isDoUpdateColorLookupTable = true;
575 		lastFrameNumber = -1;
576 		droppedFrames = 0;
577 		receivedFrames = 0;
578 		ratio = -0.001;
579 	}
580 
581 	/**
582 	 * passes/injects a new image into image processing, drawing and redrawing
583 	 * pipeline.
584 	 * 
585 	 * Uses thread locking for proper synchronisation. Drawing and Displaying is
586 	 * done inside invoked Swing thread.
587 	 * 
588 	 * @param aHdr
589 	 *            new video frame (marshalled into CVideoHeader3 instance)
590 	 */
591 	public void updateValue(CVideoHeader3 aHdr) {
592 		synchronized (lockHeaderUpdate) {
593 			constHeader = cloneVideoFrame(aHdr, false);
594 			// TODO this is safe, but slow
595 			changeHeader = cloneVideoFrame(aHdr, true);
596 			calculateDroppedFrames();
597 			uncompressIfNecessary();
598 		}
599 
600 		Runnable r = new Runnable() {
601 			public void run() {
602 				drawAndUpdate(true);
603 			}
604 		};
605 
606 		if (SwingUtilities.isEventDispatchThread()) {
607 			r.run();
608 		} else {
609 			SwingUtilities.invokeLater(r);
610 		}
611 	}
612 
613 	// ///////////////////////////////////////////////////////////////////////////
614 	//
615 	// private methods
616 	//
617 	// ///////////////////////////////////////////////////////////////////////////
618 
619 	/**
620 	 * This method initializes this current instance in order to display video
621 	 * images. Splash screen will be shown if the component is drawn after
622 	 * initialize() was finished.
623 	 * 
624 	 * @return void
625 	 */
626 	private void initialize() {
627 
628 		// initialisation of basic component and subcomponent canvas
629 		this.setSize(559, 387);
630 		this.setMinimumSize(new Dimension(MIN_X, MIN_Y));
631 		this.setLayout(new BorderLayout());
632 
633 		videoCanvas = new Canvas();
634 		videoCanvas.setBackground(Color.darkGray);
635 		videoCanvas.setBounds(5, 5, 200, 200);
636 		videoCanvas.setIgnoreRepaint(true);
637 
638 		// set defined (valid) start-up settings
639 		isLiveTransfer = false;
640 		isDoUpdateColorLookupTable = true;
641 		overlayState = OverlayState.AUTO;
642 
643 		isVideoCanvasMouseInOverlay = false;
644 		isVideoCanvasMouseInCanvas = false;
645 		isVideoCanvasMouseLeftButtonDown = false;
646 		isAOIZoom = false;
647 		isHistogramEqualisation = false;
648 
649 		// create mouse listeners for exiting, entering of canvas and left mouse
650 		// button
651 		// handling
652 		videoCanvas.addMouseListener(new MouseAdapter() {
653 			public void mouseExited(MouseEvent e) {
654 				boolean old = isVideoCanvasMouseInOverlay;
655 				isVideoCanvasMouseInOverlay = false;
656 				isVideoCanvasMouseInCanvas = false;
657 
658 				// DEBUG SW!
659 				// System.out.println("Mouse exited.");
660 
661 				if (old)
662 					repaintAnything();
663 			}
664 
665 			public void mouseEntered(MouseEvent e) {
666 				boolean old = isVideoCanvasMouseInOverlay;
667 				isVideoCanvasMouseInCanvas = true;
668 
669 				// DEBUG SW!
670 				// System.out.println("Mouse entered.");
671 
672 				if (old)
673 					repaintAnything();
674 			}
675 
676 			public void mousePressed(MouseEvent e) {
677 				// if (e.getModifiers() == MouseEvent.BUTTON1_DOWN_MASK)
678 				isVideoCanvasMouseLeftButtonDown = true;
679 
680 				// DEBUG SW!
681 				// System.out.println("Mouse down.");
682 
683 			}
684 
685 			public void mouseReleased(MouseEvent e) {
686 				// if (e.getModifiers() == MouseEvent.BUTTON1_DOWN_MASK)
687 				isVideoCanvasMouseLeftButtonDown = false;
688 
689 				// DEBUG SW!
690 				// System.out.println("Mouse up.");
691 			}
692 
693 		});
694 
695 		// creates mouse motion listener subclass for proper mouse move
696 		// events inside the image drawing canvas if left mouse button is
697 		// pressed
698 		videoCanvas.addMouseMotionListener(new MouseMotionAdapter() {
699 			public void mouseDragged(MouseEvent e) {
700 				// so that the event is also transferred down in case
701 				// of mouse button clicks
702 				onMouseMoveInCanvas(e);
703 			}
704 
705 			public void mouseMoved(MouseEvent e) {
706 				onMouseMoveInCanvas(e);
707 			}
708 		});
709 
710 		// create a component listener for flexible drawing
711 		this.addComponentListener(new ComponentAdapter() {
712 			public void componentResized(ComponentEvent ce) {
713 				repaintAnything();
714 			}
715 
716 			public void componentMoved(ComponentEvent ce) {
717 				repaintAnything();
718 			}
719 
720 			public void componentShown(ComponentEvent ce) {
721 				repaintAnything();
722 			}
723 		});
724 
725 		// create instance of color lookup table
726 		colorLookupTable = new CColorLookupTable();
727 
728 		// create splash screen (java TINE IMAGE type) by calling appropriate
729 		// helper method
730 		IMAGE splashScreen = new IMAGE();
731 		createSplashScreen(splashScreen);
732 
733 		// create error screen (java TINE IMAGE type) by calling appropriate
734 		// helper method
735 		errorScreen = new IMAGE();
736 		createErrorScreen(errorScreen);
737 
738 		// finally, add the canvas to the center of the component
739 		this.add(videoCanvas, BorderLayout.CENTER);
740 
741 		// set the splashScreen as current image to be (re-)drawn
742 		updateValue(new CVideoHeader3(splashScreen));
743 	}
744 
745 	/**
746 	 * loads or creates a splash screen. If a file "splash.png" can be found in
747 	 * the current directory on disk and can be loaded, it is used as splash
748 	 * screen. Otherwise, a basic splash screen is generated which is just a
749 	 * pure black video image.<br>
750 	 * <br>
751 	 * The splash screen is shown when the ImageDisplayer is shown or drawn and
752 	 * updateValue was not called a single time from outside before.
753 	 * 
754 	 * @param aSplashScreen
755 	 *            reference to the IMAGE instance that will store the splash
756 	 *            screen
757 	 */
758 	private void createSplashScreen(IMAGE aSplashScreen) {
759 		if (CIMMLoader.loadImageFile("splash.imm", aSplashScreen) == false) {
760 
761 			if (CBasicImageLoaderV3.loadImageFile("splash.png", aSplashScreen) == false) {
762 
763 				// obtain references:
764 				IMAGE.FrameHeader frameHeader = aSplashScreen.getFrameHeader();
765 				IMAGE.SourceHeader sourceHeader = aSplashScreen
766 						.getSourceHeader();
767 
768 				// set data of frameHeader
769 				frameHeader.aoiHeight = -1;
770 				frameHeader.aoiWidth = -1;
771 				frameHeader.bytesPerPixel = 1;
772 				frameHeader.appendedFrameSize = 768 * 576 * frameHeader.bytesPerPixel;
773 				frameHeader.effectiveBitsPerPixel = 8;
774 				frameHeader.eventNumber = 0;
775 				frameHeader.frameNumber = 0;
776 				frameHeader.fspare1 = (float) -1.0;
777 				frameHeader.fspare2 = (float) -1.0;
778 				frameHeader.fspare3 = (float) -1.0;
779 				frameHeader.horizontalBinning = 0;
780 				frameHeader.imageFlags = (int) CVideoHeader3.CF_IMAGE_FLAG_IMAGE_LOSSLESS
781 						| (int) CVideoHeader3.CF_IMAGE_FLAG_LITTLE_ENDIAN_BYTE_ORDER;
782 				frameHeader.imageFormat = (int) CVideoHeader3.CF_IMAGE_FORMAT_GRAY;
783 				frameHeader.imageRotation = (float) 0.0;
784 				frameHeader.ispare1 = -1;
785 				frameHeader.ispare2 = -1;
786 				frameHeader.ispare3 = -1;
787 				frameHeader.sourceFormat = (int) CVideoHeader3.CF_IMAGE_FORMAT_GRAY;
788 				frameHeader.sourceHeight = 576;
789 				frameHeader.sourceWidth = 768;
790 				frameHeader.verticalBinning = 0;
791 				frameHeader.xScale = (float) 1.000;
792 				frameHeader.xStart = 0;
793 				frameHeader.yScale = (float) 1.000;
794 				frameHeader.yStart = 0;
795 
796 				// set data of sourceHeader
797 				sourceHeader.baseTag = (int) CVideoHeader3.CF_IMAGE_MAGIC_01;
798 				sourceHeader.cameraPortName = "Internal Backup Splashscreen";
799 				// SW! Oct 01, 2008
800 				sourceHeader.cameraPortId = (int) CVideoHeader3.CF_IMAGE_NO_CAMERA_PORT_ID;
801 				java.util.Calendar c = java.util.Calendar.getInstance();
802 				sourceHeader.timestampMicroseconds = (int) (((c
803 						.getTimeInMillis() % ((long) 1000)) * ((long) 1000)));
804 				sourceHeader.timestampSeconds = (int) (((c.getTimeInMillis()) / ((long) 1000)));
805 				sourceHeader.totalLength = CVideoHeader3.HDRSIZE
806 						+ frameHeader.appendedFrameSize;
807 				sourceHeader.versionTag = CVideoHeader3.CF_IMAGE_VERSION;
808 
809 				// create, generate and set video frame buffer
810 
811 				byte[] buf = new byte[frameHeader.appendedFrameSize];
812 
813 				// imperformant, but easy solution:
814 				int pos = 0;
815 				for (int i = 0; i < frameHeader.sourceWidth
816 						* frameHeader.sourceHeight; i++) {
817 					buf[pos++] = (byte) 0;
818 				}
819 				aSplashScreen.setImageFrameBuffer(buf);
820 			}
821 		}
822 	}
823 
824 	/**
825 	 * loads or creates an error screen. If a file "error.png" can be found in
826 	 * the current directory on disk and can be loaded, it is used as error
827 	 * screen. Otherwise, a basic error screen is generated which is a random
828 	 * noise screen (should look like _static_ (paused) white noise in analogue
829 	 * TV.<br>
830 	 * <br>
831 	 * The error screen is shown in case the current video image could not be
832 	 * decoded, displayed or is generally malformed (header bad, ...). A special
833 	 * textual region will be embedded in the noisy error screen stating that
834 	 * the current frame could not be displayed.
835 	 * 
836 	 * @param aErrorScreen
837 	 *            reference to the IMAGE instance that will store the error
838 	 *            screen in memory
839 	 */
840 	private void createErrorScreen(IMAGE aErrorScreen) {
841 		if (CBasicImageLoaderV3.loadImageFile("error.png", aErrorScreen) == false) {
842 
843 			// obtain references:
844 			IMAGE.FrameHeader frameHeader = aErrorScreen.getFrameHeader();
845 			IMAGE.SourceHeader sourceHeader = aErrorScreen.getSourceHeader();
846 
847 			// set data of frameHeader
848 			frameHeader.aoiHeight = -1;
849 			frameHeader.aoiWidth = -1;
850 			frameHeader.appendedFrameSize = 768 * 576;
851 			frameHeader.bytesPerPixel = 1;
852 			frameHeader.effectiveBitsPerPixel = 8;
853 			frameHeader.eventNumber = 0;
854 			frameHeader.frameNumber = 0;
855 			frameHeader.fspare1 = (float) -1.0;
856 			frameHeader.fspare2 = (float) -1.0;
857 			frameHeader.fspare3 = (float) -1.0;
858 			frameHeader.horizontalBinning = 0;
859 			frameHeader.imageFlags = (int) CVideoHeader3.CF_IMAGE_FLAG_IMAGE_LOSSLESS
860 					| (int) CVideoHeader3.CF_IMAGE_FLAG_LITTLE_ENDIAN_BYTE_ORDER;
861 			frameHeader.imageFormat = (int) CVideoHeader3.CF_IMAGE_FORMAT_GRAY;
862 			frameHeader.imageRotation = (float) 0.0;
863 			frameHeader.ispare1 = -1;
864 			frameHeader.ispare2 = -1;
865 			frameHeader.ispare3 = -1;
866 			frameHeader.sourceFormat = (int) CVideoHeader3.CF_IMAGE_FORMAT_GRAY;
867 			frameHeader.sourceHeight = 576;
868 			frameHeader.sourceWidth = 768;
869 			frameHeader.verticalBinning = 0;
870 			frameHeader.xScale = (float) 1.000;
871 			frameHeader.xStart = 0;
872 			frameHeader.yScale = (float) 1.000;
873 			frameHeader.yStart = 0;
874 
875 			// set data of sourceHeader
876 			sourceHeader.baseTag = (int) CVideoHeader3.CF_IMAGE_MAGIC_01;
877 			sourceHeader.cameraPortName = "Internal Backup Splashscreen";
878 			// SW! Oct 01, 2008
879 			//sourceHeader.specificTag = (int) CVideoHeader3.CF_IMAGE_MAGIC_02;
880 			sourceHeader.cameraPortId = (int) CVideoHeader3.CF_IMAGE_NO_CAMERA_PORT_ID;
881 			java.util.Calendar c = java.util.Calendar.getInstance();
882 			sourceHeader.timestampMicroseconds = (int) (((c.getTimeInMillis() % ((long) 1000)) * ((long) 1000)));
883 			sourceHeader.timestampSeconds = (int) (((c.getTimeInMillis()) / ((long) 1000)));
884 			sourceHeader.totalLength = CVideoHeader3.HDRSIZE
885 					+ frameHeader.appendedFrameSize;
886 			sourceHeader.versionTag = CVideoHeader3.CF_IMAGE_VERSION;
887 
888 			// create, generate and set video frame buffer
889 
890 			byte[] buf = new byte[frameHeader.appendedFrameSize];
891 
892 			// imperformant, but easy solution:
893 			int pos = 0;
894 			for (int i = 0; i < frameHeader.sourceWidth
895 					* frameHeader.sourceHeight; i++) {
896 				buf[pos++] = (byte) (Math.random() * 256.0); // grayscale
897 				// noise
898 			}
899 
900 			aErrorScreen.setImageFrameBuffer(buf);
901 		}
902 	}
903 
904 	/**
905 	 * returns instance of AlphaComposite based on passed float alpha type. This
906 	 * is a helper function.
907 	 * 
908 	 * @param alpha
909 	 *            alpha (transparency) value (0 = full transparent,
910 	 *            1=intransparent)
911 	 * @return parameter als AlphaComposite
912 	 */
913 	private AlphaComposite makeComposite(float alpha) {
914 		int type = AlphaComposite.SRC_OVER;
915 		return (AlphaComposite.getInstance(type, alpha));
916 	}
917 
918 	/**
919 	 * helper function called by mouse events catched in subclasses. It handles
920 	 * the proper setting of variables in order to do overlay drawing on demand
921 	 * if mouse arrow is inside proper regions of video canvas. In addition, the
922 	 * overlay is especially redrawn if the mouse arrow is inside the video
923 	 * canvas and the left mouse button is pressed, so that "value under cursor"
924 	 * is properly redrawn.
925 	 * 
926 	 * @param e
927 	 *            MouseEvent about to be analysed
928 	 */
929 	private void onMouseMoveInCanvas(MouseEvent e) {
930 		// int xpos = e.getX();
931 
932 		int ypos = e.getY();
933 
934 		boolean old = isVideoCanvasMouseInOverlay;
935 
936 		if (((ypos > 0) && (ypos < 50))
937 				|| ((ypos > videoCanvas.getHeight() - 50) && (ypos < videoCanvas
938 						.getHeight())))
939 			isVideoCanvasMouseInOverlay = true;
940 
941 		if (((ypos <= 0) || (ypos >= videoCanvas.getHeight())))
942 			isVideoCanvasMouseInOverlay = false;
943 
944 		isVideoCanvasMouseInCanvas = true;
945 
946 		if (old != isVideoCanvasMouseInOverlay)
947 			repaintAnything();
948 
949 		// ---------------------------------------------------
950 
951 		// update if left mouse button if pressed (display of value
952 		// under cursor)
953 		if (isVideoCanvasMouseLeftButtonDown && isVideoCanvasMouseInCanvas) {
954 			// note: high cpu load might be caused!
955 			if (isLiveTransfer == false)
956 				updateImageCanvas();
957 		}
958 	}
959 
960 	/**
961 	 * helper function that does real drawing of image as well as full overlay
962 	 * information inside the video canvas. Takes into account AOI zooming and
963 	 * aspect ratio keeping.
964 	 * 
965 	 * Reads out video buffer for "value under cursor" function. It uses
966 	 * synchronize locking object in order that no important variable is
967 	 * overwritten while drawing is performed.
968 	 * 
969 	 */
970 	private void updateImageCanvas() {
971 
972 		if (constHeader == null || image == null || changeHeader == null) {
973 			// DEBUG SW!
974 			// System.out.println("Display error: one of important values is
975 			// null!");
976 			return;
977 		}
978 
979 		if (isBufferStrategyCreated == false) {
980 			if (isDisplayable()) {
981 				videoCanvas.createBufferStrategy(2);
982 				flipStrategy = videoCanvas.getBufferStrategy();
983 				isBufferStrategyCreated = true;
984 			}
985 		}
986 
987 		if (isBufferStrategyCreated == false) {
988 			// DEBUG SW!
989 			// System.out.println("Buffer Strategy not yet created!");
990 			return;
991 		}
992 
993 		synchronized (lockHeaderUpdate) {
994 
995 			try {
996 
997 				Graphics g = null;
998 				do {
999 					// The following loop ensures that the contents of the
1000 					// drawing
1001 					// buffer
1002 					// are consistent in case the underlying surface was
1003 					// recreated
1004 
1005 					// - values used for calculations or as reference for other
1006 					// values
1007 					// are taken from the changeHdr
1008 
1009 					// - values only used for display are taken from constHdr!
1010 
1011 					do {
1012 						try {
1013 							g = flipStrategy.getDrawGraphics();
1014 							Graphics2D g2d = (Graphics2D) g;
1015 
1016 							videoCanvas.paint(g);
1017 
1018 							java.awt.Rectangle imgR = new java.awt.Rectangle();
1019 							java.awt.Rectangle srcR = new java.awt.Rectangle();
1020 							java.awt.Rectangle dstR = new java.awt.Rectangle();
1021 
1022 							java.awt.Rectangle dstROrig = new java.awt.Rectangle();
1023 
1024 							imgR.x = 0;
1025 							imgR.y = 0;
1026 							imgR.width = changeHeader.frameHeader.sourceWidth;
1027 							imgR.height = changeHeader.frameHeader.sourceHeight;
1028 
1029 							srcR.x = changeHeader.frameHeader.xStart;
1030 							srcR.y = changeHeader.frameHeader.yStart;
1031 							srcR.width = changeHeader.frameHeader.aoiWidth;
1032 							srcR.height = changeHeader.frameHeader.aoiHeight;
1033 							if (srcR.width == -1)
1034 								srcR.width = imgR.width;
1035 							if (srcR.height == -1)
1036 								srcR.height = imgR.height;
1037 
1038 							dstROrig.x = 2;
1039 							dstROrig.y = 2;
1040 							dstROrig.width = videoCanvas.getWidth() - 4;
1041 							dstROrig.height = videoCanvas.getHeight() - 4;
1042 
1043 							dstR.x = 2;
1044 							dstR.y = 2;
1045 							dstR.width = videoCanvas.getWidth() - 4;
1046 							dstR.height = videoCanvas.getHeight() - 4;
1047 
1048 							if (isKeepAspectRatioEnabled) {
1049 								double whratiovideo = 1.0;
1050 								if (imgR.height > 0)
1051 									whratiovideo = ((double) imgR.width)
1052 											/ ((double) imgR.height);
1053 
1054 								for (;;) {
1055 									dstR.height = (int) Math.round((dstR.width)
1056 											/ whratiovideo);
1057 									if (dstR.height > dstROrig.height) {
1058 										dstR.width--;
1059 										continue;
1060 									} else if (dstR.width <= dstROrig.width)
1061 										break;
1062 									else
1063 										dstR.width = dstROrig.width - 1;
1064 								}
1065 
1066 								dstR.x = dstROrig.x
1067 										+ ((dstROrig.width - dstR.width) / 2);
1068 								dstR.y = dstROrig.y
1069 										+ ((dstROrig.height - dstR.height) / 2);
1070 							}
1071 
1072 							// after: now width and height are adjusted properly
1073 							// to
1074 							// canvas for whole big image
1075 							if ((isAOIZoom == false)
1076 									&& ((srcR.width != imgR.width) || (srcR.height != imgR.height))) {
1077 								g2d.setColor(Color.black);
1078 								g2d.fillRect(dstR.x, dstR.y, dstR.width,
1079 										dstR.height);
1080 							}
1081 
1082 							if (isAOIZoom) {
1083 
1084 								if (isKeepAspectRatioEnabled) {
1085 									int widthaoi = srcR.width;
1086 									int heightaoi = srcR.height;
1087 
1088 									double whratioaoi = 1.0;
1089 									if (heightaoi > 0)
1090 										whratioaoi = ((double) widthaoi)
1091 												/ ((double) heightaoi);
1092 
1093 									// heightaoi = height;
1094 									widthaoi = dstROrig.width;
1095 
1096 									for (;;) {
1097 										heightaoi = (int) Math.round((widthaoi)
1098 												/ whratioaoi);
1099 										if (heightaoi > dstROrig.height) {
1100 											widthaoi--;
1101 											continue;
1102 										} else if (widthaoi <= dstROrig.width)
1103 											break;
1104 										else {
1105 											widthaoi = dstROrig.width;
1106 										}
1107 									}
1108 
1109 									dstR.width = widthaoi;
1110 									dstR.height = heightaoi;
1111 
1112 								}
1113 
1114 								dstR.x = dstROrig.x
1115 										+ ((dstROrig.width - dstR.width) / 2);
1116 								dstR.y = dstROrig.y
1117 										+ ((dstROrig.height - dstR.height) / 2);
1118 							} else {
1119 								double wscale_full = (dstR.width * 1.0
1120 										/ imgR.width * 1.0);
1121 								double hscale_full = (dstR.height * 1.0
1122 										/ imgR.height * 1.0);
1123 
1124 								dstR.width = (int) Math.round(srcR.width
1125 										* wscale_full);
1126 								dstR.height = (int) Math.round(srcR.height
1127 										* hscale_full);
1128 
1129 								dstR.x += ((int) (srcR.x * wscale_full));
1130 								dstR.y += ((int) (srcR.y * hscale_full));
1131 							}
1132 
1133 							g2d.drawImage(image, dstR.x, dstR.y, dstR.width,
1134 									dstR.height, null);
1135 
1136 							if (changeHeader.sourceHeader.cameraPortName == null) {
1137 								String s1 = "Error: Video Frame could not be decoded or displayed properly.";
1138 								String s2 = "# "
1139 										+ constHeader.frameHeader.frameNumber;
1140 
1141 								Font f = g2d.getFont();
1142 								Font f1 = f.deriveFont(Font.BOLD, (float) 12.0);
1143 								g2d.setFont(f1);
1144 
1145 								int maxAreaWidth = g2d.getFontMetrics()
1146 										.stringWidth(s1);
1147 
1148 								if (g2d.getFontMetrics().stringWidth(s2) > maxAreaWidth)
1149 									maxAreaWidth = g2d.getFontMetrics()
1150 											.stringWidth(s2);
1151 
1152 								maxAreaWidth += 40;
1153 								int maxAreaHeight = g2d.getFontMetrics()
1154 										.getHeight() * 2 + 20;
1155 
1156 								g2d.setColor(Color.black);
1157 								g2d
1158 										.fillRect(
1159 												((videoCanvas.getWidth() - maxAreaWidth) / 2),
1160 												(videoCanvas.getHeight() - maxAreaHeight) / 2,
1161 												maxAreaWidth, maxAreaHeight);
1162 
1163 								g2d.setColor(Color.red);
1164 								g2d
1165 										.drawString(
1166 												s1,
1167 												((videoCanvas.getWidth() - g2d
1168 														.getFontMetrics()
1169 														.stringWidth(s1)) / 2),
1170 												((videoCanvas.getHeight() - maxAreaHeight) / 2) + 20);
1171 
1172 								g2d.setColor(Color.white);
1173 								g2d
1174 										.drawString(
1175 												s2,
1176 												((videoCanvas.getWidth() - g2d
1177 														.getFontMetrics()
1178 														.stringWidth(s2)) / 2),
1179 												((videoCanvas.getHeight() - maxAreaHeight) / 2)
1180 														+ 25
1181 														+ g2d.getFontMetrics()
1182 																.getHeight());
1183 
1184 								g2d.setFont(f);
1185 
1186 							}
1187 
1188 							String sAddPixelValue = "";
1189 
1190 							if ((overlayState == OverlayState.ON)
1191 									|| (overlayState == OverlayState.AUTO && isVideoCanvasMouseInOverlay)) {
1192 								if (isVideoCanvasMouseInCanvas
1193 										&& isVideoCanvasMouseLeftButtonDown) {
1194 									sAddPixelValue = "";
1195 									try {
1196 										Point p = videoCanvas
1197 												.getMousePosition();
1198 										if (p != null) {
1199 											int pixelindex = 0;
1200 
1201 											sAddPixelValue += "  (out of dimension)";
1202 
1203 											if (((p.x >= dstR.x) && (p.x <= (dstR.x + dstR.width)))
1204 													&& ((p.y >= dstR.y) && (p.y <= (dstR.y + dstR.height)))) {
1205 												// do coordinate transform, src,
1206 												// dst
1207 
1208 												double dst2srcWidthRelation = 1.0
1209 														* srcR.width
1210 														/ dstR.width;
1211 												double dst2srcHeightRelation = 1.0
1212 														* srcR.height
1213 														/ dstR.height;
1214 
1215 												int pixelx = (int) ((1.0 * (p.x - dstR.x)) * dst2srcWidthRelation);
1216 												int pixely = (int) ((1.0 * (p.y - dstR.y)) * dst2srcHeightRelation);
1217 												if (pixelx >= srcR.width)
1218 													pixelx = srcR.width - 1;
1219 												if (pixely >= srcR.height)
1220 													pixely = srcR.height - 1;
1221 
1222 												pixelindex = (srcR.width
1223 														* pixely + pixelx);
1224 
1225 												sAddPixelValue = "  px("
1226 														+ (pixelx + 1 + srcR.x)
1227 														+ ","
1228 														+ (pixely + 1 + srcR.y)
1229 														+ ") = ";
1230 
1231 												if (changeHeader.frameHeader.imageFormat == CVideoHeader3.CF_IMAGE_FORMAT_GRAY) {
1232 													byte[] rawBuffer = changeHeader
1233 															.getImage()
1234 															.getImageFrameBuffer();
1235 													long pxvalue = ((long) rawBuffer[pixelindex
1236 															* changeHeader.frameHeader.bytesPerPixel]) & 0xff;
1237 													if (changeHeader.frameHeader.bytesPerPixel > 1)
1238 														pxvalue |= (((long) rawBuffer[pixelindex
1239 																* changeHeader.frameHeader.bytesPerPixel
1240 																+ 1]) & 0xff) << 8;
1241 													if (changeHeader.frameHeader.bytesPerPixel > 2)
1242 														pxvalue |= (((long) rawBuffer[pixelindex
1243 																* changeHeader.frameHeader.bytesPerPixel
1244 																+ 2]) & 0xff) << 16;
1245 													if (changeHeader.frameHeader.bytesPerPixel > 3)
1246 														pxvalue |= (((long) rawBuffer[pixelindex
1247 																* changeHeader.frameHeader.bytesPerPixel
1248 																+ 3]) & 0xff) << 24;
1249 
1250 													int maxpxvalue = (1 << changeHeader.frameHeader.effectiveBitsPerPixel) - 1;
1251 
1252 													sAddPixelValue += pxvalue
1253 															+ " (out of "
1254 															+ maxpxvalue + ")";
1255 												} else if (changeHeader.frameHeader.imageFormat == CVideoHeader3.CF_IMAGE_FORMAT_RGB) {
1256 													int pixel = displayImageBuffer[pixelindex];
1257 													long redvalue = ((long) pixel >> 16) & 0xff;
1258 													long greenvalue = ((long) pixel >> 8) & 0xff;
1259 													long bluevalue = ((long) pixel) & 0xff;
1260 
1261 													sAddPixelValue += "("
1262 															+ redvalue + "/"
1263 															+ greenvalue + "/"
1264 															+ bluevalue + ")";
1265 												} else if (changeHeader.frameHeader.imageFormat == CVideoHeader3.CF_IMAGE_FORMAT_RGBA) {
1266 													int pixel = displayImageBuffer[pixelindex];
1267 													long redvalue = ((long) pixel >> 16) & 0xff;
1268 													long greenvalue = ((long) pixel >> 8) & 0xff;
1269 													long bluevalue = ((long) pixel) & 0xff;
1270 
1271 													sAddPixelValue += "("
1272 															+ redvalue + "/"
1273 															+ greenvalue + "/"
1274 															+ bluevalue + ")";
1275 												} else if (changeHeader.frameHeader.imageFormat == CVideoHeader3.CF_IMAGE_FORMAT_JPEG) {
1276 													int pixel = displayImageBuffer[pixelindex];
1277 													long redvalue = ((long) pixel >> 16) & 0xff;
1278 													long greenvalue = ((long) pixel >> 8) & 0xff;
1279 													long bluevalue = ((long) pixel) & 0xff;
1280 
1281 													sAddPixelValue += "("
1282 															+ redvalue + "/"
1283 															+ greenvalue + "/"
1284 															+ bluevalue + ")";
1285 												} else {
1286 													sAddPixelValue += "(unknown)";
1287 												}
1288 											} // if (p.x >= ...)
1289 										} // if (p!= null)
1290 									} catch (HeadlessException ex1) {
1291 										// ex.printStackTrace();
1292 										// DEBUG SW!
1293 									}
1294 								}
1295 							}
1296 
1297 							if (overlayState == OverlayState.ON || //
1298 									(overlayState == OverlayState.AUTO && isVideoCanvasMouseInOverlay)) {
1299 
1300 								int canvasWidth = videoCanvas.getWidth();
1301 								int canvasHeight = videoCanvas.getHeight();
1302 
1303 								Dimension canvasDim = new Dimension(
1304 										canvasWidth, canvasHeight);
1305 								Point inset = new Point(10, 16);
1306 
1307 								Composite orgComposite = g2d.getComposite();
1308 								g2d.setColor(Color.lightGray);
1309 								g2d.setComposite(makeComposite(0.85F));
1310 								g2d.fillRect(2, 2, canvasWidth - 4, 40);
1311 								g2d.fillRect(2, canvasHeight - 42,
1312 										canvasWidth - 4, 40);
1313 								g2d.setComposite(orgComposite);
1314 
1315 								g2d.setColor(Color.black);
1316 
1317 								DateFormat df = new SimpleDateFormat(
1318 										"HH:mm:ss.SSS");
1319 								FrameHeader frameHdr = constHeader.frameHeader;
1320 
1321 								String leftStr, rightStr;
1322 								leftStr = "Source: "
1323 										+ constHeader.sourceHeader.cameraPortName
1324 										+ " ("
1325 										+ CVideoHeader3
1326 												.formatToString(frameHdr.sourceFormat)
1327 										+ ")";
1328 								rightStr = "# "
1329 										+ frameHdr.frameNumber
1330 										+ " - "
1331 										+ df.format(constHeader
1332 												.getTimestampAsDate());
1333 								drawCanvasString(g2d, canvasDim, leftStr,
1334 										rightStr, new Point(0, 0), inset);
1335 
1336 								leftStr = "Size: " + frameHdr.sourceWidth
1337 										+ " px * " + frameHdr.sourceHeight
1338 										+ " px * "
1339 										+ frameHdr.effectiveBitsPerPixel
1340 										+ " of " + frameHdr.bytesPerPixel * 8
1341 										+ " bpp ";
1342 
1343 								if (frameHdr.aoiWidth != -1
1344 										|| frameHdr.aoiHeight != -1)
1345 									leftStr += "(AOI: L " + frameHdr.xStart
1346 											+ ", T " + frameHdr.yStart + ", W "
1347 											+ frameHdr.aoiWidth + ", H "
1348 											+ frameHdr.aoiHeight + ")";
1349 								else
1350 									leftStr += "(AOI: none)";
1351 
1352 								rightStr = "Drop: "
1353 										+ droppedFrames
1354 										+ " ("
1355 										+ String.format("%4.3f", Double
1356 												.valueOf(ratio * 100.0)) + "%)";
1357 								drawCanvasString(g2d, canvasDim, leftStr,
1358 										rightStr, new Point(0, 18), inset);
1359 								leftStr = "Format: "
1360 										+ CVideoHeader3
1361 												.formatToString(frameHdr.imageFormat)
1362 										+ sAddPixelValue;
1363 								drawCanvasString(g2d, canvasDim, leftStr, null,
1364 										new Point(0, canvasHeight - 42), inset);
1365 
1366 								leftStr = "Flags: "
1367 										+ CVideoHeader3
1368 												.flagsToString(frameHdr.imageFlags);
1369 
1370 								double rotation = frameHdr.imageRotation;
1371 								rightStr = "Rotation: "
1372 										+ String.format("%4.2f", Double
1373 												.valueOf(Math.abs(rotation)))
1374 										+ "%";
1375 								if (rotation > 0.0)
1376 									rightStr += " cw";
1377 								else if (rotation < 0.0)
1378 									rightStr += " ccw";
1379 
1380 								drawCanvasString(g2d, canvasDim, leftStr,
1381 										rightStr, new Point(0,
1382 												canvasHeight - 24), inset);
1383 
1384 							}
1385 
1386 						} catch (Exception ex) {
1387 							// ex.printStackTrace();
1388 							// DEBUG SW!
1389 							// "NullPointerException at getDrawGraphics"
1390 						} finally {
1391 							if (g != null) {
1392 								g.dispose();
1393 							}
1394 						}
1395 					} while (flipStrategy.contentsRestored());
1396 
1397 					// Display the buffer
1398 					flipStrategy.show();
1399 
1400 					// Repeat the rendering if the drawing buffer was lost
1401 				} while (flipStrategy.contentsLost());
1402 
1403 			} catch (Exception ex) {
1404 				// DEBUG SW!
1405 				// ex.printStackTrace();
1406 				// "Exception in thread "AWT-EventQueue-0"
1407 				// java.lang.NullPointerException"
1408 				// at flipStrategy.show() or flipStrategy.contentsLost()
1409 			}
1410 
1411 		}
1412 		// synchronized(lockHeaderUpdate)
1413 
1414 	}
1415 
1416 	/**
1417 	 * @author mdavid
1418 	 */
1419 	private void drawCanvasString(Graphics2D g2d, Dimension canvas, //
1420 			String strL, String strR, Point pos, Point inset) {
1421 
1422 		FontMetrics fm = g2d.getFontMetrics();
1423 		int strLWidth = fm.stringWidth(strL) + inset.x;
1424 		drawCanvasString(strL, canvas, g2d, fm, pos, inset);
1425 		drawCanvasString(strR, canvas, g2d, fm, new Point(-strLWidth, pos.y),
1426 				inset);
1427 	}
1428 
1429 	/**
1430 	 * @author mdavid
1431 	 */
1432 	private void drawCanvasString(String str, Dimension canvas, Graphics2D g2d,
1433 			FontMetrics fm, Point pos, Point inset) {
1434 
1435 		if (str == null || str.length() == 0)
1436 			return;
1437 
1438 		boolean isRight = (pos.x < 0);
1439 		if (isRight)
1440 			pos.x = -pos.x;
1441 
1442 		int area = canvas.width - pos.x;
1443 		int strWidth = fm.stringWidth(str) + inset.x;
1444 
1445 		if (strWidth + inset.x > area) {
1446 			String suffix = "...";
1447 			int suffixWidth = fm.stringWidth(suffix) + inset.x;
1448 			strWidth = inset.x + suffixWidth;
1449 
1450 			StringBuilder sb = new StringBuilder();
1451 			for (int i = 0; i < str.length(); i++) {
1452 				if (strWidth >= area)
1453 					break;
1454 				sb.append(str.charAt(i));
1455 				strWidth = inset.x + fm.stringWidth(sb.toString())
1456 						+ suffixWidth;
1457 			}
1458 			g2d.drawString(sb.append(suffix).toString(), pos.x + inset.x, pos.y
1459 					+ inset.y);
1460 
1461 		} else
1462 			g2d.drawString(str, (isRight ? canvas.width - strWidth : pos.x
1463 					+ inset.x), pos.y + inset.y);
1464 	}
1465 
1466 	/**
1467 	 * helper function in oder to redraw contents of GUI and video canvas
1468 	 * display properly on-demand.
1469 	 * 
1470 	 * If more than one second no new video image came in (updateValue), the
1471 	 * video canvas is redrawn, too. Otherwise it is not redrawn due to
1472 	 * performance reasons.
1473 	 * 
1474 	 */
1475 	private void repaintAnything() {
1476 
1477 		// more than one second no new image came in:
1478 		// disable live transfer for this call only; update everything else
1479 		boolean shouldnotdraw = isLiveTransfer;
1480 
1481 		if (java.lang.System.currentTimeMillis() > reenableAlwaysUpdateVideoCanvas)
1482 			shouldnotdraw = false;
1483 
1484 		if (shouldnotdraw == false) {
1485 			updateImageCanvas();
1486 		}
1487 
1488 		super.repaint();
1489 	}
1490 
1491 	/**
1492 	 * uncompresses the image data if necessary.
1493 	 * 
1494 	 * @return
1495 	 * <ul>
1496 	 * <li>true in case of successful decompression or no decompression was
1497 	 * necessary
1498 	 * <li>false error on decompression
1499 	 * </ul>
1500 	 */
1501 	private boolean uncompressIfNecessary() {
1502 		byte[] compressedBuf = null;
1503 		int pixellength = changeHeader.getAppendedWidth()
1504 				* changeHeader.getAppendedHeight();
1505 		int uncompressed_byte_length = pixellength
1506 				* changeHeader.frameHeader.bytesPerPixel;
1507 
1508 		if ((changeHeader.frameHeader.imageFormat == CVideoHeader3.CF_IMAGE_FORMAT_HUFFYUV)
1509 				&& (changeHeader.frameHeader.sourceFormat == CVideoHeader3.CF_IMAGE_FORMAT_GRAY)) {
1510 
1511 			compressedBuf = new byte[changeHeader.frameHeader.appendedFrameSize];
1512 
1513 			System.arraycopy(changeHeader.getImage().getImageFrameBuffer(), 0,
1514 					compressedBuf, 0,
1515 					changeHeader.frameHeader.appendedFrameSize);
1516 
1517 			CHuffmanDecompression.DecompressHuffYUV(compressedBuf,
1518 					0,// CVideoHeader3.HDRSIZE
1519 					changeHeader.getImage().getImageFrameBuffer(), 0,
1520 					uncompressed_byte_length);
1521 
1522 			compressedBuf = null;
1523 
1524 			// adjust header after compression
1525 			changeHeader.frameHeader.imageFormat = (int) CVideoHeader3.CF_IMAGE_FORMAT_GRAY;
1526 			changeHeader.sourceHeader.totalLength = uncompressed_byte_length
1527 					+ CVideoHeader3.HDRSIZE;
1528 			changeHeader.frameHeader.appendedFrameSize = uncompressed_byte_length;
1529 		}
1530 
1531 		return true;
1532 	}
1533 
1534 	/**
1535 	 * calculates dropped frames and dropped frames percentage for each new
1536 	 * subsequent incoming frame.<br>
1537 	 * <br>
1538 	 * The following class member variables might be updated:
1539 	 * <ul>
1540 	 * <li>droppedFrames
1541 	 * <li>ratio
1542 	 * <li>receivedFrames
1543 	 * <li>lastFrameNumber
1544 	 * </ul>
1545 	 */
1546 	private void calculateDroppedFrames() {
1547 		// calculate variables:
1548 		// droppedFrames
1549 		// ratio
1550 		// receivedFrames
1551 		//
1552 		// by this, the following variables might be updated:
1553 		// lastFrameNumber
1554 
1555 		if (lastFrameNumber == -1) {
1556 			lastFrameNumber = changeHeader.frameHeader.frameNumber;
1557 			droppedFrames = 0;
1558 			receivedFrames++;
1559 		} else {
1560 			if (changeHeader.frameHeader.frameNumber == lastFrameNumber) {
1561 				// TODO this case should be subject to some error output
1562 				// SW! DEBUG
1563 				// System.out.println("Theyre same: " +
1564 				// changeHeader.frameHeader.frameNumber + " " + lastFrameNumber
1565 				// + " !");
1566 
1567 			} else if (changeHeader.frameHeader.frameNumber < lastFrameNumber) {
1568 				receivedFrames++;
1569 			} else {
1570 				droppedFrames += (changeHeader.frameHeader.frameNumber
1571 						- lastFrameNumber - 1);
1572 				receivedFrames++;
1573 			}
1574 			lastFrameNumber = changeHeader.frameHeader.frameNumber;
1575 
1576 		}
1577 
1578 		if (receivedFrames != 0)
1579 			ratio = ((1.0 * droppedFrames) / (1.0 * (receivedFrames)));
1580 	}
1581 
1582 	/**
1583 	 * renders the display image buffer. Creates (if not exist) a
1584 	 * MemoryImageSource for easy and fast transformation from
1585 	 * 'displayImageBuffer' to java.awt.Image and forces transformation by
1586 	 * calling memoryImageSource.newPixels().
1587 	 * 
1588 	 * @return
1589 	 * <ul>
1590 	 * <li>true in case rendering was successful
1591 	 * <li>false rendering to java.awt.Image was not successful
1592 	 * </ul>
1593 	 */
1594 	private boolean renderDisplayImageBuffer() {
1595 		// (3) java: create or renew memory image source and create java
1596 		// awt.Image out of it
1597 
1598 		if (memoryImageSource == null) {
1599 			memoryImageSource = new MemoryImageSource(changeHeader
1600 					.getAppendedWidth(), changeHeader.getAppendedHeight(),
1601 					displayImageBuffer, 0, changeHeader.getAppendedWidth());
1602 			memoryImageSource.setAnimated(true);
1603 			memoryImageSource.setFullBufferUpdates(true);
1604 			image = null;
1605 		}
1606 
1607 		if (image == null) {
1608 			image = Toolkit.getDefaultToolkit().createImage(memoryImageSource);
1609 		}
1610 
1611 		memoryImageSource.newPixels();
1612 
1613 		return true;
1614 	}
1615 
1616 	/**
1617 	 * redraws the current image. A meaningful display buffer is created or
1618 	 * reused. In case of error, the error image is passed along. This display
1619 	 * buffer is rendered. <br>
1620 	 * If the parameter is set to true, the video canvas is redrawn, too.
1621 	 * 
1622 	 * @param aUpdateGraphics
1623 	 *            <ul>
1624 	 *            <li>true redraw image in canvas
1625 	 *            <li>false do updating of graphics, just "redraw" inside
1626 	 *            pipeline
1627 	 *            </ul>
1628 	 */
1629 	private void drawAndUpdate(boolean aUpdateGraphics) {
1630 		synchronized (lockHeaderUpdate) {
1631 			if (createDisplayBuffer(changeHeader) == false) {
1632 				changeHeader = new CVideoHeader3(errorScreen);
1633 				changeHeader.sourceHeader.cameraPortName = null;
1634 				createDisplayBuffer(changeHeader);
1635 			}
1636 			renderDisplayImageBuffer();
1637 		}
1638 
1639 		if (aUpdateGraphics) {
1640 			reenableAlwaysUpdateVideoCanvas = java.lang.System
1641 					.currentTimeMillis()
1642 					+ TIMEOUT;
1643 			updateImageCanvas(); // draw & display
1644 		}
1645 	}
1646 
1647 	/**
1648 	 * clones (1:1 copy) a video frame header and bits. It is used to have an
1649 	 * unchanged video header for meta information printing and a changed header
1650 	 * and bits for displaying later on.
1651 	 * 
1652 	 * 
1653 	 * @param aSrc
1654 	 *            source video image marshalled as CVideoHeader3
1655 	 * @param aFrameDataToo
1656 	 *            <ul>
1657 	 *            <li>true if the frame data (image bits) should be cloned, too
1658 	 *            <li>false if the image bits will not be cloned (much faster,
1659 	 *            less memory consumption)
1660 	 *            </ul>
1661 	 * @return cloned aSrc
1662 	 */
1663 	private CVideoHeader3 cloneVideoFrame(CVideoHeader3 aSrc,
1664 			boolean aFrameDataToo) {
1665 		// TODO there is a better way to do it
1666 
1667 		IMAGE img = new IMAGE();
1668 		IMAGE.FrameHeader frameHeader = img.getFrameHeader();
1669 		IMAGE.SourceHeader sourceHeader = img.getSourceHeader();
1670 
1671 		frameHeader.aoiHeight = aSrc.frameHeader.aoiHeight;
1672 		frameHeader.aoiWidth = aSrc.frameHeader.aoiWidth;
1673 		frameHeader.appendedFrameSize = aSrc.frameHeader.appendedFrameSize;
1674 		frameHeader.bytesPerPixel = aSrc.frameHeader.bytesPerPixel;
1675 		frameHeader.effectiveBitsPerPixel = aSrc.frameHeader.effectiveBitsPerPixel;
1676 		frameHeader.eventNumber = aSrc.frameHeader.eventNumber;
1677 		frameHeader.frameNumber = aSrc.frameHeader.frameNumber;
1678 		frameHeader.fspare1 = aSrc.frameHeader.fspare1;
1679 		frameHeader.fspare2 = aSrc.frameHeader.fspare2;
1680 		frameHeader.fspare3 = aSrc.frameHeader.fspare3;
1681 		frameHeader.horizontalBinning = aSrc.frameHeader.horizontalBinning;
1682 		frameHeader.imageFlags = aSrc.frameHeader.imageFlags;
1683 		frameHeader.imageFormat = aSrc.frameHeader.imageFormat;
1684 		frameHeader.imageRotation = aSrc.frameHeader.imageRotation;
1685 		frameHeader.ispare1 = aSrc.frameHeader.ispare1;
1686 		frameHeader.ispare2 = aSrc.frameHeader.ispare2;
1687 		frameHeader.ispare3 = aSrc.frameHeader.ispare3;
1688 		frameHeader.sourceFormat = aSrc.frameHeader.sourceFormat;
1689 		frameHeader.sourceHeight = aSrc.frameHeader.sourceHeight;
1690 		frameHeader.sourceWidth = aSrc.frameHeader.sourceWidth;
1691 		frameHeader.verticalBinning = aSrc.frameHeader.verticalBinning;
1692 		frameHeader.xScale = aSrc.frameHeader.xScale;
1693 		frameHeader.xStart = aSrc.frameHeader.xStart;
1694 		frameHeader.yScale = aSrc.frameHeader.yScale;
1695 		frameHeader.yStart = aSrc.frameHeader.yStart;
1696 
1697 		sourceHeader.baseTag = aSrc.sourceHeader.baseTag;
1698 		//sourceHeader.specificTag = aSrc.sourceHeader.specificTag;
1699 		sourceHeader.cameraPortId = aSrc.sourceHeader.cameraPortId;
1700 		sourceHeader.cameraPortName = new String(
1701 				aSrc.sourceHeader.cameraPortName);
1702 		//sourceHeader.specificTag = aSrc.sourceHeader.specificTag;
1703 		sourceHeader.timestampMicroseconds = aSrc.sourceHeader.timestampMicroseconds;
1704 		sourceHeader.timestampSeconds = aSrc.sourceHeader.timestampSeconds;
1705 		sourceHeader.totalLength = aSrc.sourceHeader.totalLength;
1706 		sourceHeader.versionTag = aSrc.sourceHeader.versionTag;
1707 
1708 		if (aFrameDataToo)
1709 			img.setImageFrameBuffer((byte[]) aSrc.getImage()
1710 					.getImageFrameBuffer().clone());
1711 		else
1712 			img.setImageFrameBuffer(new byte[1]); // TODO enhance this dummy
1713 		// hack
1714 
1715 		CVideoHeader3 temp = new CVideoHeader3(img);
1716 		return temp;
1717 	}
1718 
1719 	/**
1720 	 * Apply RGB colour histogram equalisation to the display image buffer. This
1721 	 * method is used for JPEG, RGB and ARGB input data types.<br>
1722 	 * <br>
1723 	 * <b>Note: </b>Quality of current colour histogram equalisation algorithm
1724 	 * is not very good and subject to further improvement.
1725 	 * 
1726 	 * @return true in case normalisation was applied false in case
1727 	 *         normalisation was not applied (no error, already good image,
1728 	 *         normalisation not necessary (possible) )
1729 	 */
1730 	private boolean normalizeDisplayImageBuffer() {
1731 		// this function is used/necessary for rgb color image normalisation
1732 		//
1733 		// TODO the current normalisation algorithm makes the image a little bit
1734 		// too
1735 		// bright (clipping occurs)
1736 		// IDEA: maybe use photoshop algorithm:
1737 		// http://www.lonestardigital.com/autocontrast.htm
1738 		// display image buffer contains ARGB
1739 
1740 		// (1) calculate min and max of Y (luminosity) from RGB value(s) of this
1741 		// buffer
1742 		double minY = 256.0;
1743 		double maxY = 0;
1744 		int x = 0;
1745 
1746 		try {
1747 			for (;;) {
1748 				int pixel = displayImageBuffer[x++];
1749 
1750 				int r = (pixel >> 16) & 0xff;
1751 				int g = (pixel >> 8) & 0xff;
1752 				int b = pixel & 0xff;
1753 
1754 				double Y = 0.299 * r + 0.587 * g + 0.114 * b;
1755 				if (Y < minY)
1756 					minY = Y;
1757 				if (Y > maxY)
1758 					maxY = Y;
1759 			}
1760 		} catch (ArrayIndexOutOfBoundsException ex) {
1761 			// DEBUG SW!
1762 			// ex.printStackTrace();
1763 		}
1764 
1765 		// (2) create scaling factor and offset for stretching
1766 
1767 		int lowest = (int) minY;
1768 		int highest = (int) maxY;
1769 
1770 		if (lowest == highest)
1771 			return false;
1772 
1773 		double step = (255.0) / ((double) (maxY - minY));
1774 		double offset = -1.0 * step * minY;
1775 
1776 		// (3) render stretched display image buffer
1777 
1778 		x = 0;
1779 		try {
1780 			for (;;) {
1781 				int pixel = displayImageBuffer[x];
1782 
1783 				int r = (pixel >> 16) & 0xff;
1784 				int g = (pixel >> 8) & 0xff;
1785 				int b = pixel & 0xff;
1786 
1787 				double Y = 0.299 * r + 0.587 * g + 0.114 * b;
1788 				double Ynew = Y * step + offset;
1789 				double YnewdY = Ynew / Y;
1790 
1791 				int r2 = (int) (YnewdY * r);
1792 				int g2 = (int) (YnewdY * g);
1793 				int b2 = (int) (YnewdY * b);
1794 
1795 				if (r2 < 0)
1796 					r2 = 0;
1797 				if (g2 < 0)
1798 					g2 = 0;
1799 				if (b2 < 0)
1800 					b2 = 0;
1801 				if (r2 > 255)
1802 					r2 = 255;
1803 				if (g2 > 255)
1804 					g2 = 255;
1805 				if (b2 > 255)
1806 					b2 = 255;
1807 
1808 				displayImageBuffer[x++] = 0xFF000000 | (r2 << 16) | (g2 << 8)
1809 						| (b2);
1810 			}
1811 		} catch (ArrayIndexOutOfBoundsException ex) {
1812 			// DEBUG SW!
1813 			// ex.printStackTrace();
1814 
1815 		}
1816 
1817 		return true;
1818 	}
1819 
1820 	/**
1821 	 * updates JAVA TYPE_INT_ARGB displayImageBuffer based on grayscale data
1822 	 * passed as parameter. In addition, false colour normalisation is applied.
1823 	 * The normalisation is numerically precise and quite good, however
1824 	 * consideration of algorithm modification is considered for future.<br>
1825 	 * 
1826 	 * @param aHdr
1827 	 *            marshalled greyscale image and header
1828 	 * @return
1829 	 *            <ul>
1830 	 *            <li>false - no update was done, error on creation or
1831 	 *            conversion
1832 	 *            <li>true - success
1833 	 *            </ul>
1834 	 * 
1835 	 */
1836 	private boolean createDisplayBufferFromGREY(CVideoHeader3 aHdr) {
1837 		if (aHdr.frameHeader.imageFormat != CVideoHeader3.CF_IMAGE_FORMAT_GRAY) {
1838 			return false;
1839 		}
1840 
1841 		byte[] inBuf = aHdr.getImage().getImageFrameBuffer();
1842 		final int pixellength = aHdr.getAppendedHeight()
1843 				* aHdr.getAppendedWidth();
1844 
1845 		// enable switching of false color table if data format is gray and the
1846 		// switching
1847 		// was not enabled before
1848 		if (falseColorSwitchingAllowed == false) {
1849 			// enable false color switching in gui
1850 			setFalseColorSwitchingAllowed(true);
1851 		}
1852 
1853 		// perform adjustment of color lookup table, if necessary
1854 
1855 		// force updating of color lookup table here in special case
1856 		if ((colorLookupTableArray == null)
1857 				|| (colorLookupTableArray.length != (1 << (aHdr.frameHeader.bytesPerPixel * 8))))
1858 			isDoUpdateColorLookupTable = true;
1859 
1860 		// if color lookup table update is pending, do it NOW!
1861 		if (isDoUpdateColorLookupTable) {
1862 			boolean ret = colorLookupTable.AdjustTable(colorMode,
1863 					aHdr.frameHeader.bytesPerPixel,
1864 					aHdr.frameHeader.effectiveBitsPerPixel);
1865 			if (ret || colorLookupTableArray == null)
1866 				colorLookupTableArray = colorLookupTable.getColorLookupTable();
1867 			isDoUpdateColorLookupTable = false;
1868 		}
1869 
1870 		final int maxcol = (1 << aHdr.frameHeader.effectiveBitsPerPixel) - 1;
1871 
1872 		// if normalisation is switched on, perform
1873 		// histogram equalisation on single-z-dimension (e.g. luminosity only)
1874 		// data
1875 
1876 		if (isHistogramEqualisation) {
1877 			// with histogram equalisation (normalize upper and lower)
1878 			int lowest = maxcol;
1879 			int highest = 0;
1880 
1881 			// (1) perform getting of lowest and highest intensity value
1882 			// distinct for each supported bytes per pixel setting (1, 2, 3)
1883 			if (aHdr.frameHeader.bytesPerPixel == 1) {
1884 				try {
1885 					int index = 0; // performance-tip
1886 					while (index < pixellength) {
1887 						int pixel = (inBuf[index] & maxcol);
1888 						if (pixel < lowest)
1889 							lowest = pixel;
1890 						if (pixel > highest && pixel <= maxcol)
1891 							highest = pixel;
1892 						index++;
1893 					}
1894 				} catch (ArrayIndexOutOfBoundsException ex) {
1895 					// DEBUG SW!
1896 					// ex.printStackTrace();
1897 				}
1898 			} else if (aHdr.frameHeader.bytesPerPixel == 2) {
1899 				try {
1900 					int index = 0; // performance-tip
1901 					int bytelength = pixellength * 2;
1902 					while (index < bytelength) {
1903 						int pixel = ((inBuf[index]) & 0xff)
1904 								+ (((inBuf[index + 1]) & 0xff) << 8);
1905 
1906 						if (pixel < lowest)
1907 							lowest = pixel;
1908 						if (pixel > highest && pixel <= maxcol)
1909 							highest = pixel;
1910 						index += 2;
1911 					}
1912 				} catch (ArrayIndexOutOfBoundsException ex) {
1913 					// DEBUG SW!
1914 					// ex.printStackTrace();
1915 				}
1916 			} else // 3 bytes per pixel
1917 			{
1918 				try {
1919 					int index = 0; // performance-tip
1920 					int bytelength = pixellength * 3;
1921 					while (index < bytelength) {
1922 						int pixel = ((inBuf[index]) & 0xff)
1923 								+ (((inBuf[index + 1]) & 0xff) << 8)
1924 								+ (((inBuf[index + 2]) & 0xff) << 16);
1925 
1926 						if (pixel < lowest)
1927 							lowest = pixel;
1928 						if (pixel > highest && pixel <= maxcol)
1929 							highest = pixel;
1930 
1931 						index += 3;
1932 					}
1933 				} catch (ArrayIndexOutOfBoundsException ex) {
1934 					// DEBUG SW!
1935 					// ex.printStackTrace();
1936 				}
1937 			}
1938 
1939 			// only rescale if it is necessary(!)
1940 			if (lowest != highest)
1941 
1942 			{ // rescale the inBuf
1943 				// note: affects saving, as we save wysiwyg
1944 
1945 				// TODO: found on aug01, 2008: affects not only saving, which is
1946 				// logical, but also switching it off again (which cannot be
1947 				// seen
1948 				// by updated image in canvas because _source data_ was altered
1949 
1950 				// calculate factor and offset for scaling
1951 				double step = ((double) maxcol) / ((double) (highest - lowest));
1952 				int offset = (int) (-1.0 * step * lowest);
1953 
1954 				// (1) perform conversion of data (span over whole z-dimension)
1955 				// distinct for each supported bytes per pixel setting (1, 2, 3)
1956 				if (aHdr.frameHeader.bytesPerPixel == 1) {
1957 					try {
1958 						int index = 0; // performance-tip
1959 						final int bytelength = pixellength * 1;
1960 						while (index < bytelength) {
1961 							int pixel = (inBuf[index] & 0xff);
1962 
1963 							if (pixel <= maxcol)
1964 								pixel = ((int) ((step * (double) pixel)))
1965 										+ offset;
1966 
1967 							inBuf[index] = (byte) (pixel);
1968 							index++;
1969 						}
1970 					} catch (ArrayIndexOutOfBoundsException ex) {
1971 						// DEBUG SW!
1972 						// ex.printStackTrace();
1973 					}
1974 				} else if (aHdr.frameHeader.bytesPerPixel == 2) {
1975 					try {
1976 						int index = 0; // performance-tip
1977 						final int bytelength = pixellength * 2;
1978 						while (index < bytelength) {
1979 							int pixel = ((inBuf[index]) & 0xff)
1980 									+ (((inBuf[index + 1]) & 0xff) << 8);
1981 
1982 							if (pixel <= maxcol)
1983 								pixel = ((int) ((step * (double) pixel)))
1984 										+ offset;
1985 
1986 							inBuf[index] = (byte) (pixel & 0xff);
1987 							inBuf[index + 1] = (byte) ((pixel >> 8) & 0xff);
1988 
1989 							index += 2;
1990 						}
1991 					} catch (ArrayIndexOutOfBoundsException ex) {
1992 						// DEBUG SW!
1993 						// ex.printStackTrace();
1994 					}
1995 				} else // 3 bytes per pixel
1996 				{
1997 					try {
1998 						int index = 0; // performance-tip
1999 						final int bytelength = pixellength * 3;
2000 						while (index < bytelength) {
2001 							int pixel = ((inBuf[index]) & 0xff)
2002 									+ (((inBuf[index + 1]) & 0xff) << 8)
2003 									+ (((inBuf[index + 2]) & 0xff) << 16);
2004 
2005 							if (pixel <= maxcol)
2006 								pixel = ((int) ((step * (double) pixel)))
2007 										+ offset;
2008 
2009 							inBuf[index] = (byte) (pixel & 0xff);
2010 							inBuf[index + 1] = (byte) ((pixel >> 8) & 0xff);
2011 							inBuf[index + 2] = (byte) ((pixel >> 16) & 0xff);
2012 
2013 							index += 3;
2014 						}
2015 					} catch (ArrayIndexOutOfBoundsException ex) {
2016 						// DEBUG SW!
2017 						// ex.printStackTrace();
2018 					}
2019 				} // for different bits per pixel
2020 			} // lowest != highest
2021 		} // apply normalisation
2022 
2023 		// (2) convert to DisplayImageBuffer using false-color table
2024 		// in other words: transform greyscale to ARGB32 values for java display
2025 		// distinct for each supported bytes per pixel setting (1, 2, 3)
2026 		if (aHdr.frameHeader.bytesPerPixel == 1) {
2027 			try {
2028 				int index = 0; // performance-tip
2029 				for (;;) {
2030 					displayImageBuffer[index] = colorLookupTableArray[inBuf[index] & 0xff];
2031 					index++;
2032 				}
2033 			} catch (ArrayIndexOutOfBoundsException ex) {
2034 				// DEBUG SW!
2035 				// ex.printStackTrace();
2036 			}
2037 		} else if (aHdr.frameHeader.bytesPerPixel == 2) {
2038 			try {
2039 				int index = 0; // performance-tip
2040 				for (;;) {
2041 					int pixel = ((inBuf[2 * index]) & 0xff)
2042 							+ (((inBuf[2 * index + 1]) & 0xff) << 8);
2043 
2044 					displayImageBuffer[index] = colorLookupTableArray[pixel];
2045 					index++;
2046 				}
2047 			} catch (ArrayIndexOutOfBoundsException ex) {
2048 				// DEBUG SW!
2049 				// ex.printStackTrace();
2050 			}
2051 		} else // 3 bytes per pixel
2052 		{
2053 			try {
2054 				int index = 0; // performance-tip
2055 				for (;;) {
2056 					int pixel = ((inBuf[3 * index]) & 0xff)
2057 							+ (((inBuf[3 * index + 1]) & 0xff) << 8)
2058 							+ (((inBuf[3 * index + 2]) & 0xff) << 16);
2059 
2060 					displayImageBuffer[index] = colorLookupTableArray[pixel];
2061 					index++;
2062 				}
2063 			} catch (ArrayIndexOutOfBoundsException ex) {
2064 				// DEBUG SW!
2065 				// ex.printStackTrace();
2066 			}
2067 		}
2068 
2069 		return true;
2070 	}
2071 
2072 	/**
2073 	 * updates JAVA TYPE_INT_ARGB displayImageBuffer based on colour data and
2074 	 * header passed as parameter. In addition, colour normalisation is applied
2075 	 * on demand.<br>
2076 	 * 
2077 	 * @param aHdr
2078 	 *            marshalled RGB (24bpp r-g-b) image data and header
2079 	 * @return
2080 	 *            <ul>
2081 	 *            <li>false - no update was done, error on creation or
2082 	 *            conversion
2083 	 *            <li>true - success
2084 	 *            </ul>
2085 	 * 
2086 	 */
2087 	private boolean createDisplayBufferFromRGB(CVideoHeader3 aHdr) {
2088 		if (aHdr.frameHeader.imageFormat != CVideoHeader3.CF_IMAGE_FORMAT_RGB)
2089 			return false;
2090 
2091 		if (falseColorSwitchingAllowed) {
2092 			// disable false color switching in gui
2093 			setFalseColorSwitchingAllowed(false);
2094 		}
2095 		byte[] inBuf = aHdr.getImage().getImageFrameBuffer();
2096 
2097 		// transform RGB24 color into ARGB32 color for java awt.Image
2098 		int index = 0;
2099 		int index1 = 0;
2100 
2101 		try {
2102 			for (;;) {
2103 				int red16 = inBuf[index1++] & 0xFF;
2104 				int green8 = inBuf[index1++] & 0xFF;
2105 				int blue = inBuf[index1++] & 0xFF;
2106 
2107 				displayImageBuffer[index++] = 0xFF000000 | (red16 << 16)
2108 						| (green8 << 8) | (blue);
2109 				//  
2110 			}
2111 		} catch (ArrayIndexOutOfBoundsException ex) {
2112 			// DEBUG SW!
2113 			// ex.printStackTrace();
2114 		}
2115 
2116 		if (isHistogramEqualisation)
2117 			normalizeDisplayImageBuffer();
2118 
2119 		return true;
2120 	}
2121 
2122 	/**
2123 	 * updates JAVA TYPE_INT_ARGB displayImageBuffer based on colour data and
2124 	 * header passed as parameter. In addition, colour normalisation is applied
2125 	 * on demand.<br>
2126 	 * 
2127 	 * @param aHdr
2128 	 *            marshalled RGB (32bpp a-r-g-b) image data and header, alpha is
2129 	 *            <b>not</b> taken into account
2130 	 * @return
2131 	 *            <ul>
2132 	 *            <li>false - no update was done, error on creation or
2133 	 *            conversion
2134 	 *            <li>true - success
2135 	 *            </ul>
2136 	 * 
2137 	 */
2138 	private boolean createDisplayBufferFromARGB(CVideoHeader3 aHdr) {
2139 		if (aHdr.frameHeader.imageFormat != CVideoHeader3.CF_IMAGE_FORMAT_RGBA)
2140 			return false;
2141 
2142 		if (falseColorSwitchingAllowed) {
2143 			// disable false color switching in gui
2144 			setFalseColorSwitchingAllowed(false);
2145 		}
2146 
2147 		int pixellength = aHdr.getAppendedHeight() * aHdr.getAppendedWidth();
2148 		byte[] inBuf = aHdr.getImage().getImageFrameBuffer();
2149 
2150 		System.arraycopy(java.nio.ByteBuffer.wrap(inBuf).asIntBuffer(), 0,
2151 				displayImageBuffer, 0, pixellength);
2152 
2153 		if (isHistogramEqualisation)
2154 			normalizeDisplayImageBuffer();
2155 
2156 		return true;
2157 	}
2158 
2159 	/**
2160 	 * updates JAVA TYPE_INT_ARGB displayImageBuffer based on JPEG file bits
2161 	 * data and header passed as parameter. In addition, colour normalisation is
2162 	 * applied on demand.<br>
2163 	 * 
2164 	 * @param aHdr
2165 	 *            marshalled JPEG file bits (whole file attached) image data and
2166 	 *            header
2167 	 * 
2168 	 * @return
2169 	 * <ul>
2170 	 * <li>false - no update was done, error on creation or conversion
2171 	 * <li>true - success
2172 	 * </ul>
2173 	 * 
2174 	 */
2175 	private boolean createDisplayBufferFromJPEG(CVideoHeader3 aHdr) {
2176 		if (aHdr.frameHeader.imageFormat != CVideoHeader3.CF_IMAGE_FORMAT_JPEG)
2177 			return false;
2178 
2179 		if (falseColorSwitchingAllowed) {
2180 			// disable false color switching in gui
2181 			setFalseColorSwitchingAllowed(false);
2182 		}
2183 
2184 		// decode Java image
2185 		BufferedImage jimg = null;
2186 		try {
2187 			Image i1 = Toolkit.getDefaultToolkit().createImage(
2188 					aHdr.getImage().getImageFrameBuffer());
2189 			try {
2190 				MediaTracker tracker = new MediaTracker(this);
2191 				tracker.addImage(i1, 0);
2192 				tracker.waitForID(0);
2193 			} catch (InterruptedException ex) {
2194 				// DEBUG SW!
2195 				// ex.printStackTrace();
2196 				return false;
2197 			}
2198 
2199 			jimg = new BufferedImage(aHdr.getAppendedWidth(), aHdr
2200 					.getAppendedHeight(), BufferedImage.TYPE_INT_ARGB);
2201 
2202 			Graphics2D bufImageGraphics = jimg.createGraphics();
2203 			bufImageGraphics.drawImage(i1, 0, 0, null);
2204 		} catch (Exception ex) {
2205 			// DEBUG SW!
2206 			// ex.printStackTrace();
2207 			return false;
2208 		}
2209 
2210 		// get ARGB data from java image into display buffer
2211 		int imgWidth = jimg.getWidth();
2212 		int imgHeight = jimg.getHeight();
2213 
2214 		if (imgWidth != aHdr.getAppendedWidth())
2215 			return false;
2216 		if (imgHeight != aHdr.getAppendedHeight())
2217 			return false;
2218 
2219 		if (jimg.getRGB(0, 0, imgWidth, imgHeight, displayImageBuffer, 0,
2220 				imgWidth) == null)
2221 			return false;
2222 
2223 		if (isHistogramEqualisation)
2224 			normalizeDisplayImageBuffer();
2225 
2226 		return true;
2227 	}
2228 
2229 	/**
2230 	 * Takes a snapshot of the currently displayed image.
2231 	 * 
2232 	 * @return a snapshot of the current image
2233 	 */
2234 	public Image getSnapshotImage() {
2235 		synchronized (lockHeaderUpdate) {
2236 			if (image == null)
2237 				return null;
2238 
2239 			BufferedImage bf = new BufferedImage(image.getWidth(this), image
2240 					.getHeight(this), BufferedImage.TYPE_INT_RGB);
2241 			bf.getGraphics().drawImage(image, 0, 0, this);
2242 			return bf;
2243 		}
2244 	}
2245 
2246 	/**
2247 	 * updates JAVA TYPE_INT_ARGB displayImageBuffer based on any input data and
2248 	 * header the image displayer is capable of processing. In addition,
2249 	 * normalisation of given input data type is applied on demand.<br>
2250 	 * 
2251 	 * @param aHdr
2252 	 *            RGB, ARGB, JPEG or Luminosity (Grayscale) image data and
2253 	 *            header
2254 	 * @return
2255 	 *            <ul>
2256 	 *            <li>false - no update was done, error on creation or
2257 	 *            conversion
2258 	 *            <li>true - success
2259 	 *            </ul>
2260 	 * 
2261 	 */
2262 	private boolean createDisplayBuffer(CVideoHeader3 aHdr) {
2263 		// (1) if
2264 
2265 		int iFmt = aHdr.frameHeader.imageFormat;
2266 		// int sFmt = changeHeader.frameHeader.sourceFormat;
2267 
2268 		// if ((iFmt != CVideoHeader3.CF_IMAGE_FORMAT_GRAY) &&
2269 		// && (sFmt != CVideoHeader3.CF_IMAGE_FORMAT_GRAY)
2270 
2271 		if ((iFmt != CVideoHeader3.CF_IMAGE_FORMAT_GRAY)
2272 				&& (iFmt != CVideoHeader3.CF_IMAGE_FORMAT_HUFFYUV)
2273 				&& (iFmt != CVideoHeader3.CF_IMAGE_FORMAT_RGB)
2274 				&& (iFmt != CVideoHeader3.CF_IMAGE_FORMAT_RGBA)
2275 				&& (iFmt != CVideoHeader3.CF_IMAGE_FORMAT_JPEG)) {
2276 			return false;
2277 		}
2278 
2279 		int pixellength = aHdr.getAppendedWidth() * aHdr.getAppendedHeight();
2280 
2281 		// (1) readjust storage location for display image buffer if length
2282 		// differs
2283 
2284 		if (displayImageBuffer == null) {
2285 			displayImageBuffer = new int[pixellength];
2286 			memoryImageSource = null; // invalidate!
2287 		} else if (displayImageBuffer.length != pixellength) {
2288 			displayImageBuffer = new int[pixellength];
2289 			memoryImageSource = null; // invalidate!
2290 		}
2291 
2292 		// perform branch based on image format
2293 		if (aHdr.frameHeader.imageFormat == CVideoHeader3.CF_IMAGE_FORMAT_GRAY)
2294 			return createDisplayBufferFromGREY(aHdr);
2295 		else if (aHdr.frameHeader.imageFormat == CVideoHeader3.CF_IMAGE_FORMAT_RGB)
2296 			return createDisplayBufferFromRGB(aHdr);
2297 		else if (aHdr.frameHeader.imageFormat == CVideoHeader3.CF_IMAGE_FORMAT_RGBA)
2298 			return createDisplayBufferFromARGB(aHdr);
2299 		else if (aHdr.frameHeader.imageFormat == CVideoHeader3.CF_IMAGE_FORMAT_JPEG)
2300 			return createDisplayBufferFromJPEG(aHdr);
2301 		else {
2302 			return false;
2303 		}
2304 	}
2305 
2306 }