Query the depth buffer size at startup and raise the ground lights higher above the terrain on cards with 16 bit (or less) depth buffers, where the coarser depth precision would otherwise let the lights z-fight with the runway surface.
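
(An aside, not part of the patch: a minimal sketch of why 16 bit depth buffers force this, assuming a standard perspective projection; the near/far plane values below are illustrative, not FlightGear's actual settings.)

    #include <cmath>
    #include <cstdio>

    // Approximate smallest eye-space depth difference a bits-deep depth
    // buffer can resolve at eye distance z, with near plane n and far
    // plane f.  Follows from the hyperbolic window-depth mapping
    // d = f * (z - n) / (z * (f - n)).
    static double depth_resolution( double z, double n, double f, int bits ) {
        double steps = std::pow( 2.0, bits ) - 1.0;
        return ( z * z * ( f - n ) ) / ( f * n * steps );
    }

    int main() {
        double n = 1.0, f = 120000.0, z = 5000.0;   // assumed, for illustration
        printf( "16 bit: %6.1f m at z = %.0f m\n", depth_resolution( z, n, f, 16 ), z );
        printf( "24 bit: %6.1f m at z = %.0f m\n", depth_resolution( z, n, f, 24 ), z );
        return 0;
    }

With these planes a 16 bit buffer can't separate surfaces closer than a few hundred metres at 5 km, while a 24 bit buffer resolves a metre or two, hence the much larger light offsets below for the 16 bit case.
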
char *glRenderer;
char *glVersion;
int glMaxTexSize;
+ int glDepthBits;
// Last frame rate measurement
int frame_rate;
inline void set_glVersion( char *str ) { glVersion = str; }
inline void set_glMaxTexSize( int i ) { glMaxTexSize = i; }
inline int get_glMaxTexSize() const { return glMaxTexSize; }
+ inline void set_glDepthBits( int d ) { glDepthBits = d; }
+ inline int get_glDepthBits() const { return glDepthBits; }
inline double get_frame_rate() const { return frame_rate; }
#ifdef FANCY_FRAME_COUNTER
inline double get_frame(int idx) const { return frames[idx]; }
#endif
general.set_glVendor( (char *)glGetString ( GL_VENDOR ) );
general.set_glRenderer( (char *)glGetString ( GL_RENDERER ) );
general.set_glVersion( (char *)glGetString ( GL_VERSION ) );
+ FG_LOG ( FG_GENERAL, FG_INFO, general.get_glRenderer() );
+
int tmp;
glGetIntegerv( GL_MAX_TEXTURE_SIZE, &tmp );
general.set_glMaxTexSize( tmp );
- FG_LOG ( FG_GENERAL, FG_INFO, general.get_glRenderer() );
FG_LOG ( FG_GENERAL, FG_INFO, "Max texture size = " << tmp );
+ glGetIntegerv( GL_DEPTH_BITS, &tmp );
+ general.set_glDepthBits( tmp );
+ FG_LOG ( FG_GENERAL, FG_INFO, "Depth buffer bits = " << tmp );
+
#if 0
// try to determine if we should adjust the initial default
// display resolution. The options class defaults (is
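
(Another aside, not part of the patch: glGetIntegerv( GL_DEPTH_BITS, ... ) works on the fixed-function contexts FlightGear targets here, but later core-profile OpenGL removed that query; under a core profile the same number comes from the framebuffer attachment query instead, roughly:)

    GLint depth_bits = 0;
    glGetFramebufferAttachmentParameteriv( GL_FRAMEBUFFER, GL_DEPTH,
                                           GL_FRAMEBUFFER_ATTACHMENT_DEPTH_SIZE,
                                           &depth_bits );
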
#include <simgear/debug/logstream.hxx>
#include <Aircraft/aircraft.hxx>
+#include <Include/general.hxx>
#include <Main/globals.hxx>
#include <Scenery/scenery.hxx>
#include <Time/light.hxx>
sgCopyVec3( to, sgTrans );
double dist = sgLengthVec3( to );
- sgScaleVec3( up, 10.0 + agl / 100.0 + dist / 10000 );
+ if ( general.get_glDepthBits() > 16 ) {
+     // Plenty of depth precision: a gentle lift above the terrain
+     // is enough to keep the lights from z-fighting.
+     sgScaleVec3( up, 10.0 + agl / 100.0 + dist / 10000.0 );
+ } else {
+     // 16 bit (or less) depth buffer: raise the lights much higher,
+     // since the coarse depth steps can't separate them otherwise.
+     sgScaleVec3( up, 10.0 + agl / 20.0 + dist / 5000.0 );
+ }
sgAddVec3( sgTrans, up );
lights_transform->setTransform( sgTrans );
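
(To make the new offsets concrete, a quick sketch with made-up sample values; the agl and dist figures here are hypothetical, not from the patch.)

    #include <cstdio>

    int main() {
        // Hypothetical situation: viewer 100 m above the ground, light
        // cluster 8 km away.
        double agl = 100.0, dist = 8000.0;

        // > 16 bit depth buffer: gentle lift.
        double deep    = 10.0 + agl / 100.0 + dist / 10000.0;   // 11.8
        // <= 16 bit: the agl term grows 5x faster, the dist term 2x.
        double shallow = 10.0 + agl / 20.0  + dist / 5000.0;    // 16.6

        printf( "lift with > 16 depth bits:  %.1f\n", deep );
        printf( "lift with <= 16 depth bits: %.1f\n", shallow );
        return 0;
    }

So on a shallow depth buffer the same lights ride roughly five times higher per metre of altitude and twice as high per metre of distance, trading a little visual accuracy for freedom from z-fighting.
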