I am trying to develop an augmented reality iOS app in Unity with Vuforia SDK. I am struggling with trying to make my GameObjects touchable.
Currently, I am able to get my GameObjects to hover over my markers as expected. However, when tapping on them, nothing happens.
Here is my hierarchy.
Now, I've been browsing forums and online tutorials for how to do this and here is the code I have so far.
I have two C# scripts: touchableManager (attached to the ARCamera) and touchableGameobject (attached to each of the two Cubes)
touchableManager:
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
/// <summary>
/// Raycasts each active touch from the camera this script is attached to
/// (the ARCamera) and forwards touch-phase messages (onTouchDown, onTouchUp,
/// onTouchStay, onTouchExit) to the collider that was hit, filtered by
/// <see cref="touchInputMask"/>. Receivers implement the handlers on a
/// separate MonoBehaviour (e.g. touchableGameobject).
/// </summary>
public class touchableManager : MonoBehaviour
{
    // Only colliders on these layers can receive touch messages.
    public LayerMask touchInputMask;

    // Maximum raycast distance. Was a hard-coded 100f; exposed so it can be
    // tuned in the Inspector without changing code. Default preserves the
    // original behavior.
    public float maxRayDistance = 100f;

    // Objects hit by at least one touch during the current frame.
    private List<GameObject> touchList = new List<GameObject>();

    // Snapshot of the previous frame's touched objects, used to detect
    // objects the finger has left so they receive onTouchExit.
    private GameObject[] touchesOld;

    // Cached camera component. Caching avoids a GetComponent call for every
    // touch on every frame, which the original code performed.
    private Camera raycastCamera;

    void Awake()
    {
        // This script is attached to the ARCamera, so the Camera component
        // lives on the same GameObject.
        raycastCamera = GetComponent<Camera>();
    }

    void Update()
    {
        if (Input.touchCount == 0)
        {
            return;
        }

        // Remember who was touched last frame, then rebuild the list.
        touchesOld = new GameObject[touchList.Count];
        touchList.CopyTo(touchesOld);
        touchList.Clear();

        foreach (Touch touch in Input.touches)
        {
            Ray ray = raycastCamera.ScreenPointToRay(touch.position);

            // Use a LOCAL hit per raycast. The original kept a class-level
            // RaycastHit field, so a miss (or a different touch) could leak
            // a stale hit.point into later SendMessage calls.
            RaycastHit hit;
            if (!Physics.Raycast(ray, out hit, maxRayDistance, touchInputMask.value))
            {
                continue;
            }

            GameObject recipient = hit.transform.gameObject;
            touchList.Add(recipient);

            if (touch.phase == TouchPhase.Began)
            {
                recipient.SendMessage("onTouchDown", hit.point, SendMessageOptions.DontRequireReceiver);
            }
            if (touch.phase == TouchPhase.Ended)
            {
                recipient.SendMessage("onTouchUp", hit.point, SendMessageOptions.DontRequireReceiver);
            }
            if (touch.phase == TouchPhase.Stationary || touch.phase == TouchPhase.Moved)
            {
                recipient.SendMessage("onTouchStay", hit.point, SendMessageOptions.DontRequireReceiver);
            }
            if (touch.phase == TouchPhase.Canceled)
            {
                recipient.SendMessage("onTouchExit", hit.point, SendMessageOptions.DontRequireReceiver);
            }
        }

        // Anything touched last frame but not this frame has been exited.
        foreach (GameObject g in touchesOld)
        {
            if (!touchList.Contains(g))
            {
                // No current hit relates to this object, so there is no
                // meaningful hit point to pass (the original passed a stale
                // one). Receivers with zero-parameter handlers ignore it.
                g.SendMessage("onTouchExit", Vector3.zero, SendMessageOptions.DontRequireReceiver);
            }
        }
    }
}
touchableGameobject:
using UnityEngine;
using System.Collections;
/// <summary>
/// Makes this GameObject react to touches forwarded by touchableManager:
/// it tints its material with <see cref="selectedColor"/> while touched and
/// restores <see cref="defaultColor"/> when the touch ends or leaves.
/// Handlers are invoked via SendMessage, so they take no parameters.
/// </summary>
public class touchableGameobject : MonoBehaviour
{
    public Color defaultColor;   // colour while not touched
    public Color selectedColor;  // colour while a touch is on the object

    // Per-instance material; Renderer.material returns an instance-specific
    // copy (per Unity docs), so tinting affects only this object.
    private Material mat;

    void Start() => mat = GetComponent<Renderer>().material;

    // --- Touch message handlers (called by touchableManager.SendMessage) ---

    void onTouchDown() => mat.color = selectedColor;

    void onTouchStay() => mat.color = selectedColor;

    void onTouchUp() => mat.color = defaultColor;

    void onTouchExit() => mat.color = defaultColor;
}
So far, all my application does is reveal the two GameObjects. When I tap them, nothing happens. The code should change the color of the cubes when tapped.
Any help would be greatly appreciated — please guide me along the right path.
Edit: Box Colliders are added. See screenshot.
Okay, after much fiddling and head-pounding, I managed to figure it out. I am posting what worked for me here in the event that some poor future soul can gain some value from it.
I'll answer my own question in the hopes that anybody who comes after me may find this helpful.
The following bits of code worked for me. Attach touchableGameobject.cs to each GameObject you want to be touchable, and attach touchableManager.cs to the ARCamera.
touchableGameObject.cs:
using UnityEngine;
using System.Collections;
/// <summary>
/// Touch receiver for objects raycast by touchableManager. Highlights the
/// object's material while it is being touched and reverts the colour when
/// the touch ends, leaves, or is cancelled. The handler methods are invoked
/// through SendMessage and therefore take no parameters.
/// </summary>
public class touchableGameobject : MonoBehaviour
{
    public Color defaultColor;   // colour when not touched
    public Color selectedColor;  // colour while touched

    // Cached material of this object's Renderer.
    private Material mat;

    void Start()
    {
        mat = GetComponent<Renderer>().material;
    }

    // Central place for the tint change so all four handlers stay in sync.
    private void Highlight(bool active)
    {
        mat.color = active ? selectedColor : defaultColor;
    }

    // --- SendMessage handlers (names must match touchableManager) ---

    void onTouchDown()
    {
        Highlight(true);
    }

    void onTouchUp()
    {
        Highlight(false);
    }

    void onTouchStay()
    {
        Highlight(true);
    }

    void onTouchExit()
    {
        Highlight(false);
    }
}
touchableManager.cs
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
/// <summary>
/// Attached to the ARCamera. Each frame, raycasts every active touch through
/// the main camera and forwards touch-phase messages (onTouchDown, onTouchUp,
/// onTouchStay, onTouchExit) to the hit collider, restricted to the layers in
/// <see cref="touchInputMask"/>.
/// </summary>
public class touchableManager : MonoBehaviour
{
    // Only colliders on these layers can receive touch messages. Must be set
    // in the Inspector (see the layer-setup notes below the code).
    public LayerMask touchInputMask;

    // Objects hit by at least one touch during the current frame.
    private List<GameObject> touchList = new List<GameObject>();

    // Previous frame's touched objects, diffed against touchList to detect
    // objects the finger has left so they receive onTouchExit.
    private GameObject[] touchesOld;

    // Cached reference to the main camera. Camera.main performs a tag lookup,
    // so the original code paid that cost for every touch on every frame.
    private Camera raycastCamera;

    void Awake()
    {
        raycastCamera = Camera.main;
    }

    void Update()
    {
        if (Input.touchCount == 0)
        {
            return;
        }

        touchesOld = new GameObject[touchList.Count];
        touchList.CopyTo(touchesOld);
        touchList.Clear();

        foreach (Touch touch in Input.touches)
        {
            Ray ray = raycastCamera.ScreenPointToRay(touch.position);

            // Local hit per raycast: the original kept a class-level
            // RaycastHit, so a stale hit.point from an earlier touch or
            // frame could be sent with later messages.
            RaycastHit hit;
            if (!Physics.Raycast(ray, out hit, Mathf.Infinity, touchInputMask.value))
            {
                continue;
            }

            GameObject recipient = hit.transform.gameObject;
            touchList.Add(recipient);

            if (touch.phase == TouchPhase.Began)
            {
                Debug.Log("Touched: " + recipient.name);
                recipient.SendMessage("onTouchDown", hit.point, SendMessageOptions.DontRequireReceiver);
            }
            if (touch.phase == TouchPhase.Ended)
            {
                recipient.SendMessage("onTouchUp", hit.point, SendMessageOptions.DontRequireReceiver);
            }
            if (touch.phase == TouchPhase.Stationary || touch.phase == TouchPhase.Moved)
            {
                recipient.SendMessage("onTouchStay", hit.point, SendMessageOptions.DontRequireReceiver);
            }
            if (touch.phase == TouchPhase.Canceled)
            {
                recipient.SendMessage("onTouchExit", hit.point, SendMessageOptions.DontRequireReceiver);
            }
        }

        // Anything touched last frame but not this frame has been exited.
        foreach (GameObject g in touchesOld)
        {
            if (!touchList.Contains(g))
            {
                // No current hit corresponds to this object, so pass a
                // neutral point instead of the original's stale hit.point.
                // Zero-parameter receivers ignore the argument anyway.
                g.SendMessage("onTouchExit", Vector3.zero, SendMessageOptions.DontRequireReceiver);
            }
        }
    }
}
In addition, in the Unity editor, click the "Layers" dropdown at the top right and choose "Edit Layers". Type a name such as "Touch Input" into an empty slot (e.g. User Layer 8, or whichever is available). Then select the ARCamera and, in the Inspector tab, pick your newly named layer in the "Layer" dropdown; do the same for each touchable GameObject. Finally, on the ARCamera's "touchableManager" component, also select that layer in the "Touch Input Mask" dropdown.
Hope this helps someone.