This project is read-only.

Problem in rendering Video on Marker

Jul 22, 2010 at 4:26 PM

Hi,
I am using Goblin XNA and trying to show video file on the Marker thru Augmented Reality. I am able to hear only sound of the Video ..

My Code is as follows:

------------------------------------------------------------------------------

using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Audio;
using Microsoft.Xna.Framework.Content;
using Microsoft.Xna.Framework.GamerServices;
using Microsoft.Xna.Framework.Graphics;
using Microsoft.Xna.Framework.Input;
using Microsoft.Xna.Framework.Media;
using Microsoft.Xna.Framework.Net;
using Microsoft.Xna.Framework.Storage;
using GoblinXNA.SceneGraph;
using GoblinXNA.Graphics;
using GoblinXNA;
using GoblinXNA.Device.Capture;
using GoblinXNA.Device.Vision.Marker;
using GoblinXNA.Graphics.Geometry;
using GoblinXNA.Physics;
using GoblinXNA.Sounds;
using GoblinXNA.UI.UI2D;

namespace AnimatedXNA_AR
{
/// <summary>
/// This is the main type for your game
/// </summary>
public class AnimatedGame : Microsoft.Xna.Framework.Game
{
GraphicsDeviceManager graphics;

Scene scene;

Video video;
VideoPlayer player;
Texture2D videoTexture;


AnimatedModel animatedModel;
TransformNode modelTransNode;
float elapsedTime = 0;
MarkerNode groundMarkerNode, toolbarMarkerNode;
GeometryNode modelNode;
GeometryNode groundNode;

public AnimatedGame()
{
graphics = new GraphicsDeviceManager(this);
Content.RootDirectory = "Content";

}



/// <summary>
/// Allows the game to perform any initialization it needs to before starting to run.
/// This is where it can query for any required services and load any non-graphic
/// related content. Calling base.Initialize will enumerate through any components
/// and initialize them as well.
/// </summary>
protected override void Initialize()
{
// Initialize the GoblinXNA framework
State.InitGoblin(graphics, Content, "");

// Initialize the scene graph
scene = new Scene(this);

#if !USE_ARTAG
State.ThreadOption = (ushort)ThreadOptions.MarkerTracking;
#endif
// Use the newton physics engine to perform collision detection
scene.PhysicsEngine = new NewtonPhysics();




// Use per pixel lighting for better quality (If you using non NVidia graphics card,
// setting this to true may reduce the performance significantly)
scene.PreferPerPixelLighting = true;
// Make the physics simulation space larger
// to 500x500x500 centered at the origin
((NewtonPhysics)scene.PhysicsEngine).WorldSize =
new BoundingBox(Vector3.One * -250, Vector3.One * 250);
// Increase the gravity
scene.PhysicsEngine.Gravity = 30.0f;

((NewtonPhysics)scene.PhysicsEngine).MaxSimulationSubSteps = 5;

// Set up the camera, which defines the eye location and viewing frustum
//CreateCamera();
SetupMarkerTracking();

// Set up the lights used in the scene
CreateLights();



GoblinXNA.Graphics.Model m = new GoblinXNA.Graphics.Model();
spriteBatch = new SpriteBatch(GraphicsDevice);

video = Content.Load<Video>("Bear");
player = new VideoPlayer();

//CreateGround();


// Create a ground for the skinned model to walk around

LoadVideo();



// Use per pixel lighting for better quality (If you using non NVidia graphics card,
// setting this to true may reduce the performance significantly)
scene.PreferPerPixelLighting = true;

State.ShowFPS = true;

base.Initialize();
}

private void LoadVideo()
{

if (player.State == MediaState.Stopped)
{
player.IsLooped = false;
player.Play(video);
}

player.Stop();

GeometryNode videoNode = new GeometryNode("Ground");
videoNode.Model = new Box(80, 80, 0.1f);

Material dominoMaterial = new Material();
dominoMaterial.Diffuse = new Vector4(1.5f, 1.5f, 1.5f, 1.0f);
dominoMaterial.Specular = Color.White.ToVector4();
dominoMaterial.SpecularPower = 10;

modelTransNode = new TransformNode();
modelTransNode.Translation = new Vector3(0, 0, 0);
modelTransNode.Scale = new Vector3(0.5f, 0.5f, 0.5f);
modelTransNode.Rotation = Quaternion.CreateFromAxisAngle(new Vector3(5, 5, 5), MathHelper.ToRadians(10));

#if USE_ARTAG
groundMarkerNode = new MarkerNode(scene.MarkerTracker, "ground");
#else
// Create an array to hold a list of marker IDs that are used in the marker
// array configuration (even though these are already specified in the configuration
// file, ALVAR still requires this array)
int[] ids = new int[28];
for (int i = 0; i < ids.Length; i++)
ids[i] = i;

groundMarkerNode = new MarkerNode(scene.MarkerTracker, "ALVARGroundArray.txt", ids);
#endif

if (player != null)
{
if (player.State != MediaState.Stopped)
{
videoTexture = player.GetTexture();
}
}

dominoMaterial.Texture = player.GetTexture();
videoNode.Material = dominoMaterial;


scene.RootNode.AddChild(groundMarkerNode);
groundMarkerNode.AddChild(modelTransNode);
modelTransNode.AddChild(videoNode);

}




private void SetupMarkerTracking()
{
// Create our video capture device that uses DirectShow library. Note that
// the combinations of resolution and frame rate that are allowed depend on
// the particular video capture device. Thus, setting incorrect resolution
// and frame rate values may cause exceptions or simply be ignored, depending
// on the device driver. The values set here will work for a Microsoft VX 6000,
// and many other webcams.
IVideoCapture captureDevice = null;

//if (useStaticImage)
//{
// captureDevice = new NullCapture();
// captureDevice.InitVideoCapture(0, FrameRate._30Hz, Resolution._800x600,
// ImageFormat.R8G8B8_24, false);

// ((NullCapture)captureDevice).StaticImageFile = "testImage800x600.jpg";
//}
//else
//{
captureDevice = new DirectShowCapture();
captureDevice.InitVideoCapture(0, FrameRate._30Hz, Resolution._640x480,
ImageFormat.R8G8B8_24, false);
//}

// Add this video capture device to the scene so that it can be used for
// the marker tracker
scene.AddVideoCaptureDevice(captureDevice);

IMarkerTracker tracker = null;

#if USE_ARTAG
// Create an optical marker tracker that uses ARTag library
tracker = new ARTagTracker();
// Set the configuration file to look for the marker specifications
tracker.InitTracker(638.052f, 633.673f, captureDevice.Width,
captureDevice.Height, false, "ARTag.cf");
#else
// Create an optical marker tracker that uses ALVAR library
tracker = new ALVARMarkerTracker();
((ALVARMarkerTracker)tracker).MaxMarkerError = 0.02f;
tracker.InitTracker(captureDevice.Width, captureDevice.Height, "calib.xml", 9.0);
#endif

// Set the marker tracker to use for our scene
scene.MarkerTracker = tracker;

// Display the camera image in the background. Note that this parameter should
// be set after adding at least one video capture device to the Scene class.
scene.ShowCameraImage = true;
}


private void CreateLights()
{
// Create a directional light source
LightSource lightSource = new LightSource();
lightSource.Direction = new Vector3(-1, -1, -1);
lightSource.Diffuse = Color.White.ToVector4();

LightSource lightSource2 = new LightSource();
lightSource2.Direction = new Vector3(1, 0, 0);
lightSource2.Diffuse = Color.White.ToVector4();

LightSource lightSource3 = new LightSource();
lightSource3.Direction = new Vector3(-0.5f, 0, 1);
lightSource3.Diffuse = new Vector4(0.5f, 0.5f, 0.5f, 1);

// Create a light node to hold the light source
LightNode lightNode = new LightNode();
lightNode.LightSources.Add(lightSource);
lightNode.LightSources.Add(lightSource2);
lightNode.LightSources.Add(lightSource3);

scene.RootNode.AddChild(lightNode);
}

private void CreateGround()
{


groundNode = new GeometryNode("Ground");

#if USE_ARTAG
groundNode.Model = new Box(85, 66, 0.1f);
#else
groundNode.Model = new Box(95, 59, 0.1f);
#endif

// Set this ground model to act as an occluder so that it appears transparent
groundNode.IsOccluder = true;

// Make the ground model to receive shadow casted by other objects with
// CastShadows set to true
groundNode.Model.ReceiveShadows = true;

Material groundMaterial = new Material();
groundMaterial.Diffuse = Color.Gray.ToVector4();
groundMaterial.Specular = Color.White.ToVector4();
groundMaterial.SpecularPower = 20;

groundNode.Material = groundMaterial;

groundMarkerNode.AddChild(groundNode);
}


/// <summary>
/// Allows the game to run logic such as updating the world,
/// checking for collisions, gathering input, and playing audio.
/// </summary>
/// <param name="gameTime">Provides a snapshot of timing values.</param>
protected override void Update(GameTime gameTime)
{
if (player.State == MediaState.Stopped)
{
player.IsLooped = true;
player.Play(video);
}

base.Update(gameTime);

}



/// <summary>
/// This is called when the game should draw itself.
/// </summary>
/// <param name="gameTime">Provides a snapshot of timing values.</param>
protected override void Draw(GameTime gameTime)
{

videoTexture = player.GetTexture();
base.Draw(gameTime);


}
SpriteBatch spriteBatch;
}
}

Jul 23, 2010 at 9:12 AM
Edited Jul 23, 2010 at 9:14 AM

I did it for UNI project. I am not an UNI student any more. I don't have my source any more.

I think you need to make 2D or 3D object to display image of video.

Then you need to make material value to set material of 2D or 3D object you made.

Like this :

Material screenMat = new Material();

Then, you need to set material value like this :

screenMat.Diffuse = Color.Gray.ToVector4();
screenMat.Specular = Color.White.ToVector4();
screenMat.SpecularPower = 20;

and

screenMat.textures(?) = videoTexture;

The important thing is that

<screenMat.textures(?) = videoTexture;>

this code must be with <videoTexture = player.GetTexture();>

So that, videoTexure can be updataed while the video is playing.

(I can't remember everything;;. I think this is all~!! I hope you can understand my english~(I am not english people;))